[ 487.381976] env[62619]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=62619) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 487.382337] env[62619]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=62619) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 487.382459] env[62619]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=62619) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}} [ 487.382736] env[62619]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs [ 487.479019] env[62619]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=62619) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:390}} [ 487.488152] env[62619]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.010s {{(pid=62619) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:428}} [ 488.088094] env[62619]: INFO nova.virt.driver [None req-2381e7dc-9aa9-4162-951e-bd0c37d7383e None None] Loading compute driver 'vmwareapi.VMwareVCDriver' [ 488.157138] env[62619]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 488.157351] env[62619]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 488.157416] env[62619]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=62619) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}} [ 491.255905] env[62619]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-11de1eb9-ab21-48c9-9adc-1cbda06ca4ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.271663] env[62619]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=62619) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}} [ 491.271815] env[62619]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-41bd1e85-561c-4f8f-afa8-eeb3774b15c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.298241] env[62619]: INFO oslo_vmware.api [-] Successfully established new session; session ID is f8473. 
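Note: the "Running cmd (subprocess)" / "CMD ... returned: 0 in 0.010s" pair above is emitted by oslo.concurrency's processutils when the service probes /sbin/iscsiadm for manual-scan support. A minimal sketch of that call pattern, assuming only the public processutils.execute() API (the wrapper function name is illustrative, not taken from the log):

    from oslo_concurrency import processutils

    def iscsiadm_supports_manual_scan():
        # execute() logs "Running cmd (subprocess): ..." before running the
        # command and "CMD ... returned: <rc> in <elapsed>s" afterwards,
        # matching the two DEBUG entries above.
        out, _err = processutils.execute(
            'grep', '-F', 'node.session.scan', '/sbin/iscsiadm',
            check_exit_code=[0, 1])  # grep exits 1 when the string is absent
        return bool(out)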
[ 491.298359] env[62619]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 3.141s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 491.298904] env[62619]: INFO nova.virt.vmwareapi.driver [None req-2381e7dc-9aa9-4162-951e-bd0c37d7383e None None] VMware vCenter version: 7.0.3 [ 491.302239] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-809b9352-f1a0-43ee-99b7-76459419a876 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.323205] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18241f37-d8da-4b4b-93af-ab485472fd4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.328815] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dea507-81f8-493f-870d-f0c16ab85d0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.335169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b38892c-5034-48e0-aa21-94a359f10d26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.348722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a5ad0d3-7666-46f3-adab-ebc4b84b4397 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.354534] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc89d8b-e118-4558-9363-5c2ca7c60a05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.383899] env[62619]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-aabfc36f-941c-48ab-a903-dc258b0d96ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 491.388669] env[62619]: DEBUG nova.virt.vmwareapi.driver [None req-2381e7dc-9aa9-4162-951e-bd0c37d7383e None None] Extension org.openstack.compute already exists. {{(pid=62619) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:225}} [ 491.391322] env[62619]: INFO nova.compute.provider_config [None req-2381e7dc-9aa9-4162-951e-bd0c37d7383e None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access. 
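Note: the session setup above (suds client creation, SessionManager.Login, the PropertyCollector.RetrievePropertiesEx calls, and the "VMware vCenter version: 7.0.3" line) is driven through oslo.vmware. A minimal sketch of establishing such a session, assuming oslo.vmware's VMwareAPISession constructor; the host is taken from the soap_url in the log, while the credentials and retry/poll values are placeholders rather than this deployment's settings:

    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc1.osci.c.eu-de-1.cloud.sap',  # host, as in the soap_url above
        'nova-vc-user',                  # server_username (placeholder)
        'secret',                        # server_password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5)

    # Logging in retrieves the ServiceContent, so the vCenter version that
    # the driver reports can be read straight off the session afterwards.
    print(session.vim.service_content.about.version)  # e.g. "7.0.3"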
[ 491.895067] env[62619]: DEBUG nova.context [None req-2381e7dc-9aa9-4162-951e-bd0c37d7383e None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),9a7c1614-c334-4151-9477-41d9d08ac2d5(cell1) {{(pid=62619) load_cells /opt/stack/nova/nova/context.py:464}} [ 491.897109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 491.897343] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 491.898015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 491.898506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Acquiring lock "9a7c1614-c334-4151-9477-41d9d08ac2d5" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 491.898708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Lock "9a7c1614-c334-4151-9477-41d9d08ac2d5" acquired by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 491.899751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Lock "9a7c1614-c334-4151-9477-41d9d08ac2d5" "released" by "nova.context.set_target_cell..get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 491.919536] env[62619]: INFO dbcounter [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Registered counter for database nova_cell0 [ 491.927353] env[62619]: INFO dbcounter [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Registered counter for database nova_cell1 [ 491.930711] env[62619]: DEBUG oslo_db.sqlalchemy.engines [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62619) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}} [ 491.931093] env[62619]: DEBUG oslo_db.sqlalchemy.engines [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=62619) _check_effective_sql_mode 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 491.935908] env[62619]: ERROR nova.db.main.api [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 491.935908] env[62619]: result = function(*args, **kwargs)
[ 491.935908] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 491.935908] env[62619]: return func(*args, **kwargs)
[ 491.935908] env[62619]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 491.935908] env[62619]: result = fn(*args, **kwargs)
[ 491.935908] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 491.935908] env[62619]: return f(*args, **kwargs)
[ 491.935908] env[62619]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 491.935908] env[62619]: return db.service_get_minimum_version(context, binaries)
[ 491.935908] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 491.935908] env[62619]: _check_db_access()
[ 491.935908] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 491.935908] env[62619]: stacktrace = ''.join(traceback.format_stack())
[ 491.935908] env[62619]:
[ 491.936743] env[62619]: ERROR nova.db.main.api [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main
[ 491.936743] env[62619]: result = function(*args, **kwargs)
[ 491.936743] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 491.936743] env[62619]: return func(*args, **kwargs)
[ 491.936743] env[62619]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 491.936743] env[62619]: result = fn(*args, **kwargs)
[ 491.936743] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 491.936743] env[62619]: return f(*args, **kwargs)
[ 491.936743] env[62619]: File "/opt/stack/nova/nova/objects/service.py", line 554, in _db_service_get_minimum_version
[ 491.936743] env[62619]: return db.service_get_minimum_version(context, binaries)
[ 491.936743] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 491.936743] env[62619]: _check_db_access()
[ 491.936743] env[62619]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 491.936743] env[62619]: stacktrace = ''.join(traceback.format_stack())
[ 491.936743] env[62619]:
[ 491.937153] env[62619]: WARNING nova.objects.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 491.937280] env[62619]: WARNING nova.objects.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Failed to get minimum service version for cell 9a7c1614-c334-4151-9477-41d9d08ac2d5
[ 491.937697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Acquiring lock "singleton_lock" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}}
[ 491.937862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Acquired lock "singleton_lock" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}}
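Note: the two ERROR entries above come from nova's guard against direct main-database access inside nova-compute; the offending call stack is captured with traceback.format_stack() and logged, and the caller then records the "Failed to get minimum service version for cell ..." warnings instead of aborting. A simplified, illustrative sketch of that guard pattern (the names DISABLE_DB_ACCESS and require_db_access are stand-ins, not nova's actual implementation):

    import functools
    import logging
    import traceback

    LOG = logging.getLogger(__name__)
    DISABLE_DB_ACCESS = True  # nova-compute must go through the conductor, not the DB

    def require_db_access(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            if DISABLE_DB_ACCESS:
                # Mirror the log output: dump the offending call stack, then fail.
                stacktrace = ''.join(traceback.format_stack())
                LOG.error('No DB access allowed in nova-compute: %s', stacktrace)
                raise RuntimeError('No DB access allowed in nova-compute')
            return func(*args, **kwargs)
        return wrapper

    @require_db_access
    def service_get_minimum_version(context, binaries):
        """Would query the cell database in a service where DB access is allowed."""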
[ 491.938119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Releasing lock "singleton_lock" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}}
[ 491.938476] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Full set of CONF: {{(pid=62619) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:363}}
[ 491.938655] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ******************************************************************************** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2806}}
[ 491.938798] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Configuration options gathered from: {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2807}}
[ 491.938933] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2808}}
[ 491.939143] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2809}}
[ 491.939272] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ================================================================================ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2811}}
[ 491.939484] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] allow_resize_to_same_host = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 491.939685] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] arq_binding_timeout = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 491.939826] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] backdoor_port = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 491.939949] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] backdoor_socket = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 491.940127] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] block_device_allocate_retries = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 491.940295] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] block_device_allocate_retries_interval = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}}
[ 491.940461] env[62619]: DEBUG
oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cert = self.pem {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.940630] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.940804] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute_monitors = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.940967] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] config_dir = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.941162] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] config_drive_format = iso9660 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.941297] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.941464] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] config_source = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.941634] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] console_host = devstack {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.941800] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] control_exchange = nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.941957] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cpu_allocation_ratio = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.942130] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] daemon = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.942303] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] debug = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.942461] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] default_access_ip_network_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.942658] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] default_availability_zone = nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.942827] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] default_ephemeral_format = 
None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.942990] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] default_green_pool_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.943244] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.943413] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] default_schedule_zone = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.943575] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] disk_allocation_ratio = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.943735] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] enable_new_services = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.943916] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] enabled_apis = ['osapi_compute'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.944095] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] enabled_ssl_apis = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.944260] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] flat_injected = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.944419] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] force_config_drive = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.944578] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] force_raw_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.944750] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] graceful_shutdown_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.944913] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] heal_instance_info_cache_interval = 60 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.945145] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] host = cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.945327] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] initial_cpu_allocation_ratio = 4.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.945492] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] initial_disk_allocation_ratio = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.945681] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] initial_ram_allocation_ratio = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.945906] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.946088] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_build_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.946255] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_delete_interval = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.946425] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_format = [instance: %(uuid)s] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.946596] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_name_template = instance-%08x {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.946756] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_usage_audit = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.946926] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_usage_audit_period = month {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.947106] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.947279] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] instances_path = /opt/stack/data/nova/instances {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.947446] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] internal_service_availability_zone = internal {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.947605] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] key = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.947763] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] live_migration_retry_count = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.947931] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_color = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.948111] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_config_append = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.948282] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.948475] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_dir = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.948674] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.948818] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_options = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.948987] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_rotate_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.949176] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_rotate_interval_type = days {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.949345] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] log_rotation_type = none {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.949509] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.949645] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.949821] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] 
logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.949988] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.950132] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.950304] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] long_rpc_timeout = 1800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.950464] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] max_concurrent_builds = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.950625] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] max_concurrent_live_migrations = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.950780] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] max_concurrent_snapshots = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.950942] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] max_local_block_devices = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.951108] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] max_logfile_count = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.951268] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] max_logfile_size_mb = 200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.951426] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] maximum_instance_delete_attempts = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.951599] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metadata_listen = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.951802] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metadata_listen_port = 8775 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.951979] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metadata_workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.952157] env[62619]: DEBUG oslo_service.service 
[None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] migrate_max_retries = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.952328] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] mkisofs_cmd = genisoimage {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.952538] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] my_block_storage_ip = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.952673] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] my_ip = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.952839] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] network_allocate_retries = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.953030] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.953206] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] osapi_compute_listen = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.953371] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] osapi_compute_listen_port = 8774 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.953538] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] osapi_compute_unique_server_name_scope = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.953706] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] osapi_compute_workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.953869] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] password_length = 12 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.954039] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] periodic_enable = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.954209] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] periodic_fuzzy_delay = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.954379] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] pointer_model = usbtablet {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.954589] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] preallocate_images = none {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.954781] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] publish_errors = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.954918] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] pybasedir = /opt/stack/nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.955087] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ram_allocation_ratio = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.955251] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] rate_limit_burst = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.955422] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] rate_limit_except_level = CRITICAL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.955583] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] rate_limit_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.955742] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] reboot_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.955900] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] reclaim_instance_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.956063] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] record = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.956237] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] reimage_timeout_per_gb = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.956467] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] report_interval = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.956651] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] rescue_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.956813] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] reserved_host_cpus = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.956973] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] reserved_host_disk_mb = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.957149] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb 
None None] reserved_host_memory_mb = 512 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.957312] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] reserved_huge_pages = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.957472] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] resize_confirm_window = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.957632] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] resize_fs_using_block_device = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.957824] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] resume_guests_state_on_host_boot = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.958012] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.958184] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] rpc_response_timeout = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.958377] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] run_external_periodic_tasks = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.958605] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] running_deleted_instance_action = reap {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.958779] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] running_deleted_instance_poll_interval = 1800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.958943] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] running_deleted_instance_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.959118] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler_instance_sync_interval = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.959291] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_down_time = 720 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.959509] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] servicegroup_driver = db {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.959711] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] shell_completion = None {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.959882] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] shelved_offload_time = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.960055] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] shelved_poll_interval = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.960259] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] shutdown_timeout = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.960441] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] source_is_ipv6 = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.960605] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ssl_only = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.960862] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.961046] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] sync_power_state_interval = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.961218] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] sync_power_state_pool_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.961392] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] syslog_log_facility = LOG_USER {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.961547] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] tempdir = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.961710] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] timeout_nbd = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.961879] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] transport_url = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.962051] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] update_resources_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.962217] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_cow_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.962374] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_eventlog = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.962533] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_journal = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.962692] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_json = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.962850] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_rootwrap_daemon = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.963012] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_stderr = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.963188] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] use_syslog = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.963348] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vcpu_pin_set = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.963517] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plugging_is_fatal = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.963689] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plugging_timeout = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.963856] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] virt_mkfs = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.964030] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] volume_usage_poll_interval = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.964198] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] watch_log_file = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.964370] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] web = /usr/share/spice-html5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2819}} [ 491.964562] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_brick.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.964801] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.964987] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_brick.wait_mpath_device_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.965181] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_concurrency.disable_process_locking = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.965825] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.966035] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.966218] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.966400] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_metrics.metrics_process_name = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.966579] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.966750] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.966939] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.auth_strategy = keystone {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.967125] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.compute_link_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.967309] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.967491] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.dhcp_domain = novalocal {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.967665] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.enable_instance_password = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.967838] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.glance_link_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.968015] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.968206] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.instance_list_cells_batch_strategy = distributed {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.968373] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.instance_list_per_project_cells = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.968574] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.list_records_by_skipping_down_cells = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.968750] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.local_metadata_per_cell = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.968925] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.max_limit = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.969111] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.metadata_cache_expiration = 15 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.969293] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.neutron_default_tenant_id = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.969492] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.response_validation = warn {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.969682] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.use_neutron_default_nets = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.969855] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.970030] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_dynamic_failure_fatal = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.970206] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.970381] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_dynamic_ssl_certfile = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.970583] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_dynamic_targets = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.970774] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_jsonfile_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.970962] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api.vendordata_providers = ['StaticJSON'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.971175] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.backend = dogpile.cache.memcached {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.971345] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.backend_argument = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.971519] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.config_prefix = cache.oslo {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.971690] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.dead_timeout = 60.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.971855] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.debug_cache_backend = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.972024] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.enable_retry_client = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.972192] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.enable_socket_keepalive = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.972365] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.enabled = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.972528] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.enforce_fips_mode = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.972694] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.expiration_time = 600 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.972860] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.hashclient_retry_attempts = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.973033] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.hashclient_retry_delay = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.973202] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_dead_retry = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.973362] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.973524] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.973688] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.973851] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_pool_maxsize = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.974018] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_pool_unused_timeout = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.974186] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_sasl_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.974367] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_servers = ['localhost:11211'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.974538] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_socket_timeout = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.974701] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.memcache_username = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.974868] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.proxies = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.975044] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_db = 0 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.975211] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.975386] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_sentinel_service_name = mymaster {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.975565] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_sentinels = ['localhost:26379'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.975741] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_server = localhost:6379 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.975906] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_socket_timeout = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.976080] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.redis_username = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.976248] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.retry_attempts = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.976426] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.retry_delay = 0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.976611] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.socket_keepalive_count = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.976777] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.socket_keepalive_idle = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.976938] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.socket_keepalive_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.977110] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.tls_allowed_ciphers = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.977271] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.tls_cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.977429] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.tls_certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
491.977594] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.tls_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.977751] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cache.tls_keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.977923] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.978109] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.978274] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.978477] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.catalog_info = volumev3::publicURL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.978652] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.978818] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.978982] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.cross_az_attach = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.979163] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.debug = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.979326] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.endpoint_template = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.979514] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.http_retries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.979685] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.979844] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.980028] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.os_region_name = RegionOne 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.980198] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.980360] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cinder.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.980532] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.980696] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.cpu_dedicated_set = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.980856] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.cpu_shared_set = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.981033] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.image_type_exclude_list = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.981203] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.live_migration_wait_for_vif_plug = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.981368] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.max_concurrent_disk_ops = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.981533] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.max_disk_devices_to_attach = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.981695] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.981867] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.982044] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.resource_provider_association_refresh = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.982213] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.sharing_providers_max_uuids_per_request = 200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.982375] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.shutdown_retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.982576] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.982783] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] conductor.workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.982970] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] console.allowed_origins = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.983148] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] console.ssl_ciphers = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.983326] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] console.ssl_minimum_version = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.983494] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] consoleauth.enforce_session_timeout = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.983665] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] consoleauth.token_ttl = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.983838] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.983995] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.984173] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.984332] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.984490] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.984648] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.984810] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] 
cyborg.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.984965] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.985138] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.985298] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.985457] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.985617] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.985775] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.985946] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.service_type = accelerator {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.986122] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.986282] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.986456] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.986628] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.986813] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.986974] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] cyborg.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.987171] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.backend = sqlalchemy {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.987347] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.987515] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.connection_debug = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.987686] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.connection_parameters = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.987850] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.connection_recycle_time = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.988014] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.connection_trace = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.988189] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.db_inc_retry_interval = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.988365] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.db_max_retries = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.988598] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.db_max_retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.988778] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.db_retry_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.988942] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.max_overflow = 50 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.989120] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.max_pool_size = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.989286] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.max_retries = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.989478] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.mysql_sql_mode = TRADITIONAL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.989685] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.mysql_wsrep_sync_wait = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} 
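Note on the masked values in this dump (for example cache.backend_argument = ****, database.connection = ****, and key_manager.fixed_key = ****): these are options that oslo.config registers with secret=True, so log_opt_values() prints **** in place of the configured value when the service logs its effective configuration at DEBUG level, which is exactly what produces the lines above. A minimal, self-contained sketch of that mechanism follows; the option names and project name are illustrative only, not Nova's actual registration code.

import logging

from oslo_config import cfg

LOG = logging.getLogger(__name__)
logging.basicConfig(level=logging.DEBUG)

# Hypothetical options for illustration; secret=True is what makes
# log_opt_values() print "****" instead of the real value.
opts = [
    cfg.StrOpt('connection', secret=True,
               help='Database URL (masked in logs).'),
    cfg.IntOpt('max_pool_size', default=5,
               help='SQLAlchemy connection pool size.'),
]

CONF = cfg.CONF
CONF.register_opts(opts, group='database')

if __name__ == '__main__':
    # Parse (empty) CLI args and any config files for the project.
    CONF(args=[], project='example')
    # Emits one DEBUG line per registered option, e.g.
    #   database.connection = ****
    #   database.max_pool_size = 5
    CONF.log_opt_values(LOG, logging.DEBUG)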
[ 491.989852] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.pool_timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.990028] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.990193] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.slave_connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.990356] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.sqlite_synchronous = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.990516] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] database.use_db_reconnect = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.990705] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.backend = sqlalchemy {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.990877] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.991050] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.connection_debug = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.991225] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.connection_parameters = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.991391] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.connection_recycle_time = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.991556] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.connection_trace = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.991717] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.db_inc_retry_interval = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.991881] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.db_max_retries = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.992055] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.db_max_retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.992223] env[62619]: DEBUG 
oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.db_retry_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.992383] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.max_overflow = 50 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.992545] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.max_pool_size = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.992710] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.max_retries = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.992881] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.993051] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.mysql_wsrep_sync_wait = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.993214] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.pool_timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.993376] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.retry_interval = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.993535] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.slave_connection = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.993740] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] api_database.sqlite_synchronous = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.993930] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] devices.enabled_mdev_types = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.994124] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.994301] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ephemeral_storage_encryption.default_format = luks {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.994465] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ephemeral_storage_encryption.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.994662] env[62619]: 
DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ephemeral_storage_encryption.key_size = 512 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.994855] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.api_servers = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.995034] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.995203] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.995365] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.995522] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.995685] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.995846] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.debug = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996017] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.default_trusted_certificate_ids = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996183] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.enable_certificate_validation = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996345] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.enable_rbd_download = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996507] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996672] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996838] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.996993] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] 
glance.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.997166] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.997330] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.num_retries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.997500] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.rbd_ceph_conf = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.997665] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.rbd_connect_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.997833] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.rbd_pool = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.998000] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.rbd_user = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.998175] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.998337] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.998522] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.998706] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.service_type = image {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.998870] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.999048] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.999211] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.999371] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.999586] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.999757] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.verify_glance_signatures = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 491.999916] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] glance.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.000097] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] guestfs.debug = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.000267] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] mks.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.000629] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.000827] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] image_cache.manager_interval = 2400 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.001012] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] image_cache.precache_concurrency = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.001193] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] image_cache.remove_unused_base_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.001370] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.001542] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.001726] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] image_cache.subdirectory_name = _base {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.001905] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.api_max_retries = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.002085] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.api_retry_interval = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
492.002252] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.002417] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.002580] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.002743] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.002908] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.003082] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.conductor_group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.003247] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.003407] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.003571] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.003738] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.003899] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.004069] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.004231] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.004401] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.peer_list = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.004563] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.region_name = None {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.004756] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.004929] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.serial_console_state_timeout = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.005104] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.005284] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.service_type = baremetal {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.005446] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.shard = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.005610] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.005826] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.005996] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.006170] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.006360] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.006559] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ironic.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.006761] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.006936] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] key_manager.fixed_key = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.007137] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.007304] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.barbican_api_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.007463] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.barbican_endpoint = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.007637] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.barbican_endpoint_type = public {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.007794] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.barbican_region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.007955] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.008126] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.008291] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.008479] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.008654] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.008821] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.number_of_retries = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.008982] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.retry_delay = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.009162] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.send_service_user_token = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.009325] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.009508] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.009683] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.verify_ssl = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.009845] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican.verify_ssl_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010020] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010194] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010357] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010514] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010677] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010835] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.010993] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.011170] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.011324] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] barbican_service_user.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.011488] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.approle_role_id = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.011645] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.approle_secret_id = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.011840] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.kv_mountpoint = secret {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.012015] env[62619]: DEBUG 
oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.kv_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.012186] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.kv_version = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.012349] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.namespace = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.012509] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.root_token_id = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.012672] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.ssl_ca_crt_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.012838] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.timeout = 60.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.013071] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.use_ssl = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.013253] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.013435] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.013601] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.013764] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.013925] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.014109] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.014276] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.014435] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.connect_retry_delay = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.014594] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.014755] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.014911] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.015079] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.015239] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.015396] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.015555] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.015712] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.015882] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.service_type = identity {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.016054] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.016219] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.016380] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.016570] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.016761] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 
492.016923] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] keystone.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.017138] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.connection_uri = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.017304] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_mode = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.017470] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_model_extra_flags = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.017640] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_models = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.017810] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_power_governor_high = performance {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.017980] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_power_governor_low = powersave {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.018159] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_power_management = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.018335] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.018539] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.device_detach_attempts = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.018725] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.device_detach_timeout = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.018895] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.disk_cachemodes = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.019068] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.disk_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.019238] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.enabled_perf_events = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.019411] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.file_backed_memory = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.019611] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.gid_maps = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.019764] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.hw_disk_discard = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.019921] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.hw_machine_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.020112] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_rbd_ceph_conf = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.020290] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.020457] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.020632] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_rbd_glance_store_name = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.020804] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_rbd_pool = rbd {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.020974] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_type = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.021147] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.images_volume_group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.021310] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.inject_key = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.021473] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.inject_partition = -2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.021634] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.inject_password = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.021796] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] 
libvirt.iscsi_iface = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.021958] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.iser_use_multipath = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.022135] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_bandwidth = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.022301] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_completion_timeout = 800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.022463] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_downtime = 500 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.022628] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_downtime_delay = 75 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.022788] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_downtime_steps = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.022948] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_inbound_addr = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.023122] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_permit_auto_converge = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.023292] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_permit_post_copy = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.023454] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_scheme = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.023631] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_timeout_action = abort {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.023795] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_tunnelled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.023955] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_uri = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.024129] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.live_migration_with_native_tls = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.024291] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.max_queues = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.024454] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.mem_stats_period_seconds = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.024692] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.migration_inbound_addr = 10.180.1.21 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.024858] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.nfs_mount_options = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.025176] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.025356] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.num_aoe_discover_tries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.025525] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.num_iser_scan_tries = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.025690] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.num_memory_encrypted_guests = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.025854] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.num_nvme_discover_tries = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.026030] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.num_pcie_ports = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.026209] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.num_volume_scan_tries = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.026377] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.pmem_namespaces = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.026567] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.quobyte_client_cfg = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.026859] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.027046] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rbd_connect_timeout = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.027219] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.027385] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.027547] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rbd_secret_uuid = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.027707] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rbd_user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.027870] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.realtime_scheduler_priority = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.028050] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.remote_filesystem_transport = ssh {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.028218] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rescue_image_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.028377] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rescue_kernel_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.028566] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rescue_ramdisk_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.028752] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rng_dev_path = /dev/urandom {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.028916] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.rx_queue_size = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.029128] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.smbfs_mount_options = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.029428] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.029680] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.snapshot_compression = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.029867] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.snapshot_image_format = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.030161] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.030347] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.sparse_logical_volumes = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.030539] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.swtpm_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.030739] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.swtpm_group = tss {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.030910] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.swtpm_user = tss {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.031096] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.sysinfo_serial = unique {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.031262] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.tb_cache_size = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.031422] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.tx_queue_size = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.031592] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.uid_maps = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.031756] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.use_virtio_for_bridges = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.031930] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.virt_type = kvm {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.032114] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.volume_clear = zero {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.032282] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.volume_clear_size = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.032449] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.volume_use_multipath = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.032608] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_cache_path = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.032778] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.032947] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_mount_group = qemu {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.033126] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_mount_opts = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.033297] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.033572] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.033752] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.vzstorage_mount_user = stack {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.033919] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.034106] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.034285] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.034446] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.034608] env[62619]: DEBUG oslo_service.service 
[None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.034772] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.034931] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.035102] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.035279] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.default_floating_pool = public {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.035441] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.035607] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.extension_sync_interval = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.035771] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.http_retries = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.035934] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.036105] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.036298] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.036492] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.metadata_proxy_shared_secret = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.036667] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.036842] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.ovs_bridge = br-int {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.037014] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.physnets = [] {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.037233] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.region_name = RegionOne {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.037415] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.037593] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.service_metadata_proxy = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.037756] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.037926] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.service_type = network {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.038105] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.038269] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.038443] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.038618] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.038803] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.038965] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] neutron.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.039154] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] notifications.bdms_in_notifications = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.039335] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] notifications.default_level = INFO {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.039544] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] notifications.notification_format = unversioned {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.039723] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] notifications.notify_on_state_change = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.039901] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.040089] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] pci.alias = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.040266] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] pci.device_spec = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.040433] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] pci.report_in_placement = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.040609] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.040786] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.040955] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.auth_url = http://10.180.1.21/identity {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.041128] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.041289] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.041452] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.041612] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.041770] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.041925] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.default_domain_id = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.042097] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.default_domain_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.042262] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.042421] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.domain_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.042607] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.042793] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.042954] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.043127] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.043287] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.043459] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.043621] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.project_domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.043789] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.project_domain_name = Default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.043960] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.project_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.044148] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.project_name = service {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.044320] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.region_name = RegionOne {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.044482] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.044644] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.044813] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.service_type = placement {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.044976] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.045151] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.045310] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.045469] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.system_scope = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.045627] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.045784] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.trust_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.045942] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.user_domain_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.046124] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.user_domain_name = Default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.046286] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.user_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.046477] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.username = nova {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.046674] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.046838] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] placement.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.047026] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.cores = 20 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.047200] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.count_usage_from_placement = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.047375] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.047553] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.injected_file_content_bytes = 10240 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.047722] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.injected_file_path_length = 255 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.047889] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.injected_files = 5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.048066] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.instances = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.048239] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.key_pairs = 100 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.048427] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.metadata_items = 128 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.048603] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.ram = 51200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.048773] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.recheck_quota = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.048942] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.server_group_members = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.049121] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] quota.server_groups = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.049297] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.049486] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.049667] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.image_metadata_prefilter = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.049832] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.049997] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.max_attempts = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.050177] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.max_placement_results = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.050342] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.050503] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.query_placement_for_image_type_support = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.050666] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.050842] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] scheduler.workers = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.051025] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.051205] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.aggregate_image_properties_isolation_separator = . 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.051384] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.051554] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.051721] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.051886] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.052062] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.052256] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.052425] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.host_subset_size = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.052589] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.052748] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.image_properties_default_architecture = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.052907] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.053082] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.isolated_hosts = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.053249] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.isolated_images = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.053412] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.max_instances_per_host = 50 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.053577] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.053739] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.053902] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.pci_in_placement = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.054076] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.054251] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.054438] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.054635] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.054819] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.054984] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.055164] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.track_instance_changes = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.055345] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.055516] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metrics.required = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.055681] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metrics.weight_multiplier = 1.0 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.055843] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metrics.weight_of_unavailable = -10000.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.056010] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] metrics.weight_setting = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.056336] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.056511] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] serial_console.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.056692] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] serial_console.port_range = 10000:20000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.056862] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.057039] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.057210] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] serial_console.serialproxy_port = 6083 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.057377] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.057552] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.auth_type = password {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.057712] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.057868] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.058037] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.058202] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.insecure = False {{(pid=62619) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.058358] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.058557] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.send_service_user_token = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.058727] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.058899] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] service_user.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.059083] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.agent_enabled = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.059250] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.059600] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.059809] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.html5proxy_host = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.059986] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.html5proxy_port = 6082 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.060165] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.image_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.060328] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.jpeg_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.060489] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.playback_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.060654] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.require_secure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.060826] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.server_listen = 127.0.0.1 {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.060994] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.061169] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.streaming_mode = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.061330] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] spice.zlib_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.061497] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] upgrade_levels.baseapi = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.061671] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] upgrade_levels.compute = auto {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.061831] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] upgrade_levels.conductor = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.061989] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] upgrade_levels.scheduler = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.062168] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.auth_section = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.062330] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.062512] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.062734] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.062913] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.063088] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.063255] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.keyfile = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.063419] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.063583] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vendordata_dynamic_auth.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.063754] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.api_retry_count = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.063914] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.ca_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.064096] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.cache_prefix = devstack-image-cache {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.064268] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.cluster_name = testcl1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.064433] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.connection_pool_size = 10 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.064595] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.console_delay_seconds = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.064766] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.datastore_regex = ^datastore.* {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.064969] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.065174] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.host_password = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.065378] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.host_port = 443 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.065556] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.host_username = administrator@vsphere.local {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.065727] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.insecure = True {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.065890] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.integration_bridge = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.066070] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.maximum_objects = 100 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.066236] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.pbm_default_policy = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.066405] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.pbm_enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.066631] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.pbm_wsdl_location = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.066821] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.066983] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.serial_port_proxy_uri = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.067160] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.serial_port_service_uri = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.067330] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.task_poll_interval = 0.5 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.067545] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.use_linked_clone = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.067725] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.vnc_keymap = en-us {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.067927] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.vnc_port = 5900 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.068123] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vmware.vnc_port_total = 10000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.068322] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.auth_schemes = ['none'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.068560] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.068879] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.069114] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.069312] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.novncproxy_port = 6080 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.069553] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.server_listen = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.069746] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.069947] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.vencrypt_ca_certs = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.070136] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.vencrypt_client_cert = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.070329] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vnc.vencrypt_client_key = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.070531] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.070735] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.disable_deep_image_inspection = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.070916] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.disable_fallback_pcpu_query = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.071110] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.disable_group_policy_check_upcall = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.071312] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.071483] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.disable_rootwrap = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.071652] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.enable_numa_live_migration = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.071855] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.072040] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.072212] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.handle_virt_lifecycle_events = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.072385] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.libvirt_disable_apic = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.072588] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.never_download_image_if_on_rbd = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.072763] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.072959] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.073155] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.073329] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.073526] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.073698] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.073892] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.074079] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.074277] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.074488] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.074671] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.client_socket_timeout = 900 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.074873] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.default_pool_size = 1000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.075151] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.keep_alive = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.075358] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.max_header_line = 16384 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.075530] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.secure_proxy_ssl_header = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.075723] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.ssl_ca_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.075905] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.ssl_cert_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.076085] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.ssl_key_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.076259] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.tcp_keepidle = 600 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.076494] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.076682] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] zvm.ca_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.076879] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] zvm.cloud_connector_url = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.077203] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.077413] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] zvm.reachable_timeout = 300 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.077644] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.enforce_new_defaults = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.078094] env[62619]: WARNING oslo_config.cfg [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] Deprecated: Option "enforce_scope" from group "oslo_policy" is deprecated for removal (This configuration was added temporarily to facilitate a smooth transition to the new RBAC. OpenStack will always enforce scope checks. This configuration option is deprecated and will be removed in the 2025.2 cycle.). Its value may be silently ignored in the future. 
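(Context note, not part of the captured log.) The wall of DEBUG lines above and below is nova-compute's standard startup dump of every registered configuration option, produced by oslo.config's ConfigOpts.log_opt_values() (the cfg.py:2826 call site named in each entry); values come from option defaults plus whatever devstack wrote into nova.conf, with secrets such as vmware.host_password and the notifications transport_url masked as ****. The WARNING about oslo_policy.enforce_scope is oslo.config's generic deprecation notice for an option declared deprecated-for-removal that is still set in the config file. A minimal, hypothetical sketch of that mechanism follows; the option names, defaults, and logger are illustrative, not Nova's actual registration code.

```python
# Hypothetical sketch: how an oslo.config-based service produces the
# "group.option = value" dump and deprecation warnings seen in this log.
import logging

from oslo_config import cfg

logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger('demo')

CONF = cfg.ConfigOpts()
CONF.register_opts(
    [
        cfg.BoolOpt('enforce_new_defaults', default=True),
        # deprecated_for_removal is what triggers the "Deprecated: Option ..."
        # warning when the option is still present in the loaded config file.
        cfg.BoolOpt('enforce_scope', default=True,
                    deprecated_for_removal=True,
                    deprecated_reason='Scope checks will always be enforced.'),
    ],
    group='oslo_policy')

CONF([], project='demo')  # parse CLI/config (none here); defaults apply
# Logs every registered option at DEBUG level, one entry per option,
# in the same "oslo_policy.enforce_scope = True" style as the dump above.
CONF.log_opt_values(LOG, logging.DEBUG)
```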
[ 492.078320] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.enforce_scope = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.078535] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.policy_default_rule = default {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.078732] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.078912] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.policy_file = policy.yaml {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.079097] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.079265] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.079429] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.079591] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.079754] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.079921] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.080109] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.080292] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.connection_string = messaging:// {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.080461] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.enabled = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.080634] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.es_doc_type = notification 
{{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.080802] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.es_scroll_size = 10000 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.080969] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.es_scroll_time = 2m {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.081146] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.filter_error_trace = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.081317] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.hmac_keys = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.081484] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.sentinel_service_name = mymaster {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.081651] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.socket_timeout = 0.1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.081813] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.trace_requests = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.081973] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler.trace_sqlalchemy = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.082161] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler_jaeger.process_tags = {} {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.082323] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler_jaeger.service_name_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.082485] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] profiler_otlp.service_name_prefix = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.082650] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] remote_debug.host = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.082809] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] remote_debug.port = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.083051] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.083239] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.083410] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.083576] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.083736] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.083896] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.084066] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.084233] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.heartbeat_rate = 3 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.084396] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.084566] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.hostname = devstack {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.084724] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.084890] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.085496] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.085496] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.085496] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.processname = nova-compute {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.085627] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.085711] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.085878] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.086054] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.086220] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.086389] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.086555] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.086718] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.086884] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.087059] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.087223] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_stream_fanout = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.087384] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.087545] env[62619]: DEBUG oslo_service.service [None 
req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rabbit_transient_quorum_queue = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.087714] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.087878] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.ssl = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.088059] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.088244] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.088427] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.088599] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.088769] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.ssl_version = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.088931] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_rabbit.use_queue_manager = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.089138] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.089307] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_notifications.retry = -1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.089526] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.089719] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_messaging_notifications.transport_url = **** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.089895] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.auth_section = None {{(pid=62619) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.090074] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.auth_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.090239] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.cafile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.090398] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.certfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.090563] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.collect_timing = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.090725] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.connect_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.090879] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.connect_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091045] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.endpoint_id = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091209] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.endpoint_override = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091368] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.insecure = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091525] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.keyfile = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091683] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.max_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091839] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.min_version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.091995] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.region_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.092169] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.retriable_status_codes = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.092329] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.service_name = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.092485] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.service_type = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.092646] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.split_loggers = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.092803] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.status_code_retries = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.092960] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.status_code_retry_delay = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.093128] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.timeout = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.093288] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.valid_interfaces = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.093443] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_limit.version = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.093609] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_reports.file_event_handler = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.093772] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_reports.file_event_handler_interval = 1 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.093928] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] oslo_reports.log_dir = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.094110] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.094272] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_linux_bridge_privileged.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.094430] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.094595] 
env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_linux_bridge_privileged.logger_name = oslo_privsep.daemon {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.094759] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.094917] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_linux_bridge_privileged.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.095101] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.095265] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_ovs_privileged.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.095423] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_ovs_privileged.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.095588] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.095746] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.095902] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] vif_plug_ovs_privileged.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.096085] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.flat_interface = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.096268] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.096444] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.096617] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.096789] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] 
os_vif_linux_bridge.iptables_top_regex = {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.096966] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.097159] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.097328] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_linux_bridge.vlan_interface = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.097507] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.097680] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.isolate_vif = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.097848] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.098031] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.098207] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.098377] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.ovsdb_interface = native {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.098562] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] os_vif_ovs.per_port_bridge = False {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.098737] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] privsep_osbrick.capabilities = [21] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.098897] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] privsep_osbrick.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.099067] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] privsep_osbrick.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.099234] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None 
None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.099407] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] privsep_osbrick.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.099657] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] privsep_osbrick.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.099861] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.100040] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] nova_sys_admin.group = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.100219] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] nova_sys_admin.helper_command = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.100390] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.100557] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] nova_sys_admin.thread_pool_size = 8 {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.100722] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] nova_sys_admin.user = None {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2826}} [ 492.100849] env[62619]: DEBUG oslo_service.service [None req-57051641-8b76-4029-9dba-5f4f2632e6eb None None] ******************************************************************************** {{(pid=62619) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2830}} [ 492.101360] env[62619]: INFO nova.service [-] Starting compute node (version 30.1.0) [ 492.607018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Getting list of instances from cluster (obj){ [ 492.607018] env[62619]: value = "domain-c8" [ 492.607018] env[62619]: _type = "ClusterComputeResource" [ 492.607018] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 492.607018] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-101e9b44-01e9-414d-a7aa-bd316f6d76c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 492.614989] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 492.615679] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] 
The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 492.616284] env[62619]: INFO nova.virt.node [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Generated node identity c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 [ 492.616664] env[62619]: INFO nova.virt.node [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Wrote node identity c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 to /opt/stack/data/n-cpu-1/compute_id [ 493.119065] env[62619]: WARNING nova.compute.manager [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Compute nodes ['c1b543f3-8b72-4e01-a5a8-30dc9ed76c83'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 494.124934] env[62619]: INFO nova.compute.manager [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 495.133021] env[62619]: WARNING nova.compute.manager [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 495.133021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 495.133021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 495.133021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 495.133021] env[62619]: DEBUG nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 495.133021] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143136d3-35cc-44b8-b93c-fad9400dded7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.141136] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c8adc4d-7492-4a3b-b58a-5137fc734633 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.154648] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-d4d2ee19-add0-4d81-a7a9-3cebbc2b2e77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.161205] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-011c602c-a5f3-43a6-b2d5-1cf2414835cb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 495.190308] env[62619]: DEBUG nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181446MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 495.190619] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 495.190951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 495.698595] env[62619]: WARNING nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] No compute node record for cpu-1:c1b543f3-8b72-4e01-a5a8-30dc9ed76c83: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 could not be found. [ 496.201772] env[62619]: INFO nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 [ 497.715785] env[62619]: DEBUG nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 497.715785] env[62619]: DEBUG nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 497.865413] env[62619]: INFO nova.scheduler.client.report [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] [req-f5cf539a-4874-4809-94ae-16a429b297b9] Created resource provider record via placement API for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
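The placement exchange above can be summarized as follows: the resource tracker derives an inventory per resource class from the hypervisor view it just audited (48 usable vCPUs, 196590 MB of RAM, 400 GB of disk) and pushes it to the freshly created provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. The short sketch below reconstructs that payload from the values visible in this log; the helper name build_inventory and the hard-coded max_unit/allocation-ratio figures are lifted from the log output itself rather than from Nova's source, so treat it as an illustration of the payload shape, not the driver's actual code.

    # Illustrative sketch only: rebuilds the inventory dict seen in the log for
    # provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Helper name and constants
    # are assumptions taken from the log lines above, not Nova's implementation.
    def build_inventory(total_vcpus, memory_mb, disk_gb,
                        cpu_ratio=4.0, ram_ratio=1.0, disk_ratio=1.0,
                        reserved_ram_mb=512):
        """Return a Placement-style inventory dict keyed by resource class."""
        return {
            'VCPU': {'total': total_vcpus, 'reserved': 0, 'min_unit': 1,
                     'max_unit': 16, 'step_size': 1,
                     'allocation_ratio': cpu_ratio},
            'MEMORY_MB': {'total': memory_mb, 'reserved': reserved_ram_mb,
                          'min_unit': 1, 'max_unit': 65530, 'step_size': 1,
                          'allocation_ratio': ram_ratio},
            'DISK_GB': {'total': disk_gb, 'reserved': 0, 'min_unit': 1,
                        'max_unit': 149, 'step_size': 1,
                        'allocation_ratio': disk_ratio},
        }

    # Values reported for domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28:
    inventory = build_inventory(total_vcpus=48, memory_mb=196590, disk_gb=400)

The same dict shape reappears unchanged in the later records, which is why the subsequent resource audits log "Inventory has not changed" and skip re-sending it to Placement.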
[ 497.878830] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51fb6cf8-e827-4975-9411-5d9be6bf5676 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.887096] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2854dad-f99c-44eb-9239-0b9a1acf9094 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.917307] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef817879-ae6d-4d6c-bf49-e32acd3d0bce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.924638] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf4ecb22-e64b-4945-a8d5-8dfa3b68a3e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 497.937537] env[62619]: DEBUG nova.compute.provider_tree [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 498.472957] env[62619]: DEBUG nova.scheduler.client.report [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 498.473284] env[62619]: DEBUG nova.compute.provider_tree [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 0 to 1 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 498.473465] env[62619]: DEBUG nova.compute.provider_tree [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 498.523614] env[62619]: DEBUG nova.compute.provider_tree [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Updating 
resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 1 to 2 during operation: update_traits {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 499.028382] env[62619]: DEBUG nova.compute.resource_tracker [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 499.028998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.838s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 499.028998] env[62619]: DEBUG nova.service [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Creating RPC server for service compute {{(pid=62619) start /opt/stack/nova/nova/service.py:186}} [ 499.042938] env[62619]: DEBUG nova.service [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] Join ServiceGroup membership for this service compute {{(pid=62619) start /opt/stack/nova/nova/service.py:203}} [ 499.043208] env[62619]: DEBUG nova.servicegroup.drivers.db [None req-cf18f121-054a-47ed-ad04-d9c899bde6a7 None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=62619) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 524.045677] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_power_states {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 524.549397] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Getting list of instances from cluster (obj){ [ 524.549397] env[62619]: value = "domain-c8" [ 524.549397] env[62619]: _type = "ClusterComputeResource" [ 524.549397] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 524.550661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e06f8e4d-c82e-497d-ae50-fab8d7671d34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.562234] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 524.562234] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 524.562234] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Getting list of instances from cluster (obj){ [ 524.562234] env[62619]: value = "domain-c8" [ 524.562234] env[62619]: _type = "ClusterComputeResource" [ 524.562234] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 524.563887] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c04da40-a16b-470c-b0d3-04f280974936 
{{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 524.572394] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Got total of 0 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 534.337534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquiring lock "f6d9ce5b-d610-4573-b43b-21836f7f8a1b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 534.337818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Lock "f6d9ce5b-d610-4573-b43b-21836f7f8a1b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 534.843114] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 535.385622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 535.386815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 535.389053] env[62619]: INFO nova.compute.claims [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 536.438076] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b487e7-0f74-4b21-a15e-140f378b2523 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.446203] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea5a878a-068a-4d2c-8043-89d2a08fe4a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.479167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ba05c1d-0587-4600-a66f-8a6a78903530 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.486747] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-028cfba2-d35a-483a-a1cf-42774a94a0c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 536.499911] env[62619]: DEBUG nova.compute.provider_tree [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 537.005188] env[62619]: DEBUG nova.scheduler.client.report [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 537.102984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "1f94cb8f-7773-4330-ab1e-9ccad7585b07" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.103264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "1f94cb8f-7773-4330-ab1e-9ccad7585b07" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.512364] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.126s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 537.514695] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 537.554196] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquiring lock "0c828d87-be04-47a4-87f9-a0f54622326b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.554196] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Lock "0c828d87-be04-47a4-87f9-a0f54622326b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.576209] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquiring lock "d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 537.576437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Lock "d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 537.605907] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.023411] env[62619]: DEBUG nova.compute.utils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 538.025185] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 538.029945] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 538.066017] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.079654] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 538.149584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.149994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 538.151681] env[62619]: INFO nova.compute.claims [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 538.279606] env[62619]: DEBUG nova.policy [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7cccd3adaaf748fd8a310252189d176f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9ce83b74237482a95523b2c970edb85', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 538.537785] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 538.601567] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.607596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.705708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquiring lock "88346467-e1ac-4647-bcf7-063636853a2b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 538.705931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Lock "88346467-e1ac-4647-bcf7-063636853a2b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 539.170887] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Successfully created port: a14ec30f-de96-4fd7-b218-8c0fb823620c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 539.208038] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 539.280412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96f53d87-bde6-48dc-bfee-006927080208 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.293814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26faf708-fa4c-4011-9e40-ee97c73375c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.326856] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21750030-1c1d-4f2c-b5d4-cff169bdbfc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.335487] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ba6d66-1746-4ecb-91cf-0040e7370288 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.349666] env[62619]: DEBUG nova.compute.provider_tree [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 539.548273] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 539.590790] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 539.590790] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 539.590978] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 539.591030] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 539.591154] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 539.591941] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 539.591941] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 539.591941] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 539.592322] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d 
tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 539.592385] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 539.592525] env[62619]: DEBUG nova.virt.hardware [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 539.593396] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc5fc6e0-bf10-4649-af6c-0ed40cdd4e1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.601655] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f06f56f7-4d38-48fb-9d65-5d1d52913e70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.625165] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-985c40d4-29cc-4b4b-af7b-ad5794c839a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 539.754437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 539.853674] env[62619]: DEBUG nova.scheduler.client.report [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 540.361528] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.211s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 540.364016] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] 
[instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 540.366280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.765s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.369262] env[62619]: INFO nova.compute.claims [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 540.442696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 540.442937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 540.875924] env[62619]: DEBUG nova.compute.utils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 540.876619] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 540.877063] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 540.948659] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 541.058848] env[62619]: DEBUG nova.policy [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38226df72425401b9396a5ce15c8c1f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b74a31666884e19b5a91325d435a6e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 541.382671] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 541.475964] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.528509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b3fb5de-c49e-489c-ab49-600b7ad16ded {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.537462] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9087bfa1-0c3e-41d2-99a7-e0755ab9deb4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.575718] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquiring lock "67cf14dd-309a-4abb-b8e0-bc289c7cc845" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 541.575948] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Lock "67cf14dd-309a-4abb-b8e0-bc289c7cc845" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 541.577507] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e8e9c27-7fd3-4d7f-a116-d7419b49b3e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.586050] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3a521a-3477-45d2-af27-a7d9047daf90 {{(pid=62619) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 541.601042] env[62619]: DEBUG nova.compute.provider_tree [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 542.080830] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 542.107461] env[62619]: DEBUG nova.scheduler.client.report [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 542.396521] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 542.440170] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 542.440419] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 542.440578] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 542.440800] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 542.440881] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 542.441051] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 542.441282] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 542.441432] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
542.441598] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 542.441755] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 542.441916] env[62619]: DEBUG nova.virt.hardware [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 542.442793] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843a9d06-5a9f-49a1-8300-bf0fec12a00f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.452363] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cfb7885-ba42-4e07-85f2-5112436c0c56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 542.582976] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Successfully created port: 5c856c2c-a8fe-4788-9891-9b7511a0008a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 542.605490] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 542.612149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.246s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 542.612952] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 542.616935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 542.618869] env[62619]: INFO nova.compute.claims [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 543.131137] env[62619]: DEBUG nova.compute.utils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 543.136301] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 543.136438] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 543.170090] env[62619]: ERROR nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. 
[ 543.170090] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.170090] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.170090] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.170090] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.170090] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.170090] env[62619]: ERROR nova.compute.manager raise self.value [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.170090] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 543.170090] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.170090] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 543.170575] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.170575] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 543.170575] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. 
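(Editor's note: the traceback above shows the path that produced this PortBindingFailed error: _update_port() calls _ensure_no_port_binding_failure(port) at nova/network/neutron.py:294, which raises exception.PortBindingFailed, and _update_ports_for_instance() re-raises it through oslo.utils' save_and_reraise_exception() context manager. The sketch below only reconstructs that shape for readability; it is not the Nova source, and the 'binding:vif_type' attribute and 'binding_failed' sentinel are assumptions based on standard Neutron port attributes rather than anything asserted by this log.)

    # Illustrative sketch of the pattern visible in the traceback above
    # (not the actual Nova code).
    from oslo_utils import excutils


    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs for "
                "more information." % port_id)
            self.port_id = port_id


    def _ensure_no_port_binding_failure(port):
        # Assumed convention: Neutron marks a port it could not bind by
        # setting its binding:vif_type to 'binding_failed'.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    def _update_ports_for_instance(ports, created_port_ids):
        for port in ports:
            try:
                _ensure_no_port_binding_failure(port)
                created_port_ids.append(port['id'])
            except Exception:
                with excutils.save_and_reraise_exception():
                    # Cleanup/logging happens here; the captured
                    # PortBindingFailed is re-raised when this block exits,
                    # which is why the same exception appears twice in the
                    # frames above (force_reraise -> raise self.value).
                    created_port_ids.clear()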
[ 543.170575] env[62619]: ERROR nova.compute.manager [ 543.170699] env[62619]: Traceback (most recent call last): [ 543.170699] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 543.170699] env[62619]: listener.cb(fileno) [ 543.170699] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.170699] env[62619]: result = function(*args, **kwargs) [ 543.170699] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 543.170699] env[62619]: return func(*args, **kwargs) [ 543.170699] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.170699] env[62619]: raise e [ 543.170699] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.170699] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 543.170699] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.170699] env[62619]: created_port_ids = self._update_ports_for_instance( [ 543.170699] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.170699] env[62619]: with excutils.save_and_reraise_exception(): [ 543.171136] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.171136] env[62619]: self.force_reraise() [ 543.171136] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.171136] env[62619]: raise self.value [ 543.171136] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.171136] env[62619]: updated_port = self._update_port( [ 543.171136] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.171136] env[62619]: _ensure_no_port_binding_failure(port) [ 543.171136] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.171136] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 543.171136] env[62619]: nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. [ 543.171136] env[62619]: Removing descriptor: 14 [ 543.173222] env[62619]: ERROR nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. 
[ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Traceback (most recent call last): [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] yield resources [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self.driver.spawn(context, instance, image_meta, [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] vm_ref = self.build_virtual_machine(instance, [ 543.173222] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] vif_infos = vmwarevif.get_vif_info(self._session, [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] for vif in network_info: [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return self._sync_wrapper(fn, *args, **kwargs) [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self.wait() [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self[:] = self._gt.wait() [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return self._exit_event.wait() [ 543.173560] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 543.173560] env[62619]: ERROR 
nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] result = hub.switch() [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return self.greenlet.switch() [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] result = function(*args, **kwargs) [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return func(*args, **kwargs) [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] raise e [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] nwinfo = self.network_api.allocate_for_instance( [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] created_port_ids = self._update_ports_for_instance( [ 543.173973] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] with excutils.save_and_reraise_exception(): [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self.force_reraise() [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] raise self.value [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] updated_port = self._update_port( [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 543.174533] 
env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] _ensure_no_port_binding_failure(port) [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] raise exception.PortBindingFailed(port_id=port['id']) [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. [ 543.174533] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] [ 543.175223] env[62619]: INFO nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Terminating instance [ 543.177650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquiring lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 543.178579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquired lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 543.178797] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 543.303622] env[62619]: DEBUG nova.policy [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '21d44112263a4ac5b6df6dd5a7fad411', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '974e9b943b5642588ba06cc89466b4e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 543.646671] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 543.742630] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 543.853643] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5435e80-0a13-4d16-a499-abe5f5185dd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.864415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91e2c55-b732-4e33-ad99-2b401fa55e50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.904410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-192513c4-098f-46e8-b656-1c421608453c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.911622] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1609c1fd-21cc-42a7-bde3-abcff9e82843 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 543.925267] env[62619]: DEBUG nova.compute.provider_tree [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 543.945144] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 544.057924] env[62619]: DEBUG nova.compute.manager [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Received event network-changed-a14ec30f-de96-4fd7-b218-8c0fb823620c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 544.058144] env[62619]: DEBUG nova.compute.manager [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Refreshing instance network info cache due to event network-changed-a14ec30f-de96-4fd7-b218-8c0fb823620c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 544.058365] env[62619]: DEBUG oslo_concurrency.lockutils [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] Acquiring lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 544.241400] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Successfully created port: fab98fab-a200-4c3f-b028-89260b05d8c5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 544.427993] env[62619]: DEBUG nova.scheduler.client.report [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 544.448069] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Releasing lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 544.448635] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 544.448706] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 544.449112] env[62619]: DEBUG oslo_concurrency.lockutils [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] Acquired lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 544.449224] env[62619]: DEBUG nova.network.neutron [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Refreshing network info cache for port a14ec30f-de96-4fd7-b218-8c0fb823620c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 544.453685] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f5a676e0-c659-48ee-8f2f-5580818e6bce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.467183] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dc6ab57-84d1-4299-915c-49faff24c0b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.491329] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f6d9ce5b-d610-4573-b43b-21836f7f8a1b could not be found. [ 544.492228] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 544.492228] env[62619]: INFO nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 544.492467] env[62619]: DEBUG oslo.service.loopingcall [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 544.492532] env[62619]: DEBUG nova.compute.manager [-] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 544.492610] env[62619]: DEBUG nova.network.neutron [-] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 544.522972] env[62619]: DEBUG nova.network.neutron [-] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 544.660533] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 544.688678] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 544.688941] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 544.689140] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 544.689332] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 544.689497] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 544.689650] env[62619]: DEBUG 
nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 544.689880] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 544.690284] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 544.691047] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 544.691047] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 544.691047] env[62619]: DEBUG nova.virt.hardware [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 544.692063] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aab9fd2c-8849-4e23-a786-a8fefe89773b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.701107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ed15926-8fdd-4741-ad29-ccdded5088b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 544.711559] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquiring lock "606514b7-c9c5-43c1-bc71-5337228373f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 544.711786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Lock "606514b7-c9c5-43c1-bc71-5337228373f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.933138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.317s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 544.933681] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 544.936613] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.182s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 544.938144] env[62619]: INFO nova.compute.claims [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 545.027078] env[62619]: DEBUG nova.network.neutron [-] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.057122] env[62619]: DEBUG nova.network.neutron [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 545.226281] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 545.246400] env[62619]: DEBUG nova.network.neutron [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 545.447172] env[62619]: DEBUG nova.compute.utils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 545.448856] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 545.449768] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 545.528942] env[62619]: INFO nova.compute.manager [-] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Took 1.04 seconds to deallocate network for instance. [ 545.535125] env[62619]: DEBUG nova.compute.claims [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 545.535125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.620285] env[62619]: DEBUG nova.policy [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '7b35985dcf7149b6b70b1606c7280fd4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09e520a3b3ab448fa026a6ca94e0d75e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 545.754293] env[62619]: DEBUG oslo_concurrency.lockutils [req-81816f77-a8af-46f1-9a26-f049f8458ca3 req-c7c2d32f-60d9-4bd1-a34b-6468a7c5c0a1 service nova] Releasing lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 545.767457] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 545.898096] env[62619]: ERROR nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. [ 545.898096] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.898096] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.898096] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.898096] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.898096] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.898096] env[62619]: ERROR nova.compute.manager raise self.value [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.898096] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 545.898096] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.898096] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 545.898661] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.898661] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 545.898661] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. 
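(Editor's note: the same failure pattern recurs here for a second port, 5c856c2c-a8fe-4788-9891-9b7511a0008a, after a14ec30f-de96-4fd7-b218-8c0fb823620c above. When triaging a log in this format, the failing port IDs can be collected mechanically; the snippet below is purely an editorial illustration keyed to the "Binding failed for port ..." message text shown in these records, not a tool that ships with Nova or Tempest.)

    # Editorial helper: list the port UUIDs reported in "Binding failed for
    # port <uuid>" messages in a log shaped like this one.
    import re
    import sys

    PORT_RE = re.compile(
        r"Binding failed for port "
        r"([0-9a-f]{8}(?:-[0-9a-f]{4}){3}-[0-9a-f]{12})")

    def failing_ports(lines):
        """Return the unique port UUIDs that failed to bind, sorted."""
        return sorted({m.group(1) for line in lines
                       for m in PORT_RE.finditer(line)})

    if __name__ == "__main__":
        # usage: python3 failing_ports.py < nova-compute.log
        print("\n".join(failing_ports(sys.stdin)))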
[ 545.898661] env[62619]: ERROR nova.compute.manager [ 545.898661] env[62619]: Traceback (most recent call last): [ 545.898661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 545.898661] env[62619]: listener.cb(fileno) [ 545.898661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.898661] env[62619]: result = function(*args, **kwargs) [ 545.898661] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.898661] env[62619]: return func(*args, **kwargs) [ 545.898661] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.898661] env[62619]: raise e [ 545.898661] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.898661] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 545.898661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.898661] env[62619]: created_port_ids = self._update_ports_for_instance( [ 545.898661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.898661] env[62619]: with excutils.save_and_reraise_exception(): [ 545.898661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.898661] env[62619]: self.force_reraise() [ 545.898661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.898661] env[62619]: raise self.value [ 545.898661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.898661] env[62619]: updated_port = self._update_port( [ 545.898661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.898661] env[62619]: _ensure_no_port_binding_failure(port) [ 545.898661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.898661] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 545.900266] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. [ 545.900266] env[62619]: Removing descriptor: 16 [ 545.900266] env[62619]: ERROR nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. 
[ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Traceback (most recent call last): [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] yield resources [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self.driver.spawn(context, instance, image_meta, [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 545.900266] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] vm_ref = self.build_virtual_machine(instance, [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] vif_infos = vmwarevif.get_vif_info(self._session, [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] for vif in network_info: [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return self._sync_wrapper(fn, *args, **kwargs) [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self.wait() [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self[:] = self._gt.wait() [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return self._exit_event.wait() [ 545.900632] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 545.900963] env[62619]: ERROR 
nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] result = hub.switch() [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return self.greenlet.switch() [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] result = function(*args, **kwargs) [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return func(*args, **kwargs) [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] raise e [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] nwinfo = self.network_api.allocate_for_instance( [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 545.900963] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] created_port_ids = self._update_ports_for_instance( [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] with excutils.save_and_reraise_exception(): [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self.force_reraise() [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] raise self.value [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] updated_port = self._update_port( [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 545.902478] 
env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] _ensure_no_port_binding_failure(port) [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 545.902478] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] raise exception.PortBindingFailed(port_id=port['id']) [ 545.902828] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. [ 545.902828] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] [ 545.902828] env[62619]: INFO nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Terminating instance [ 545.904558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 545.904710] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquired lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 545.904912] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 545.950616] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 546.218073] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce96e4fe-3b1b-44f7-a3f8-885064d47d57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.230878] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f5017cd-d82c-4621-801a-4825f370931b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.274268] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdae17c0-3a46-4cac-98ce-1f0aa51c7536 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.282778] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e69e7443-48a8-4c51-96e8-f467456de09c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 546.303689] env[62619]: DEBUG nova.compute.provider_tree [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 546.449199] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 546.514806] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Successfully created port: 8623168c-a37b-474f-8a2c-e4312275c1c9 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 546.732926] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 546.806415] env[62619]: DEBUG nova.scheduler.client.report [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 546.961416] env[62619]: DEBUG nova.compute.manager [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Received event network-vif-deleted-a14ec30f-de96-4fd7-b218-8c0fb823620c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 546.961522] env[62619]: DEBUG nova.compute.manager [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Received event network-changed-5c856c2c-a8fe-4788-9891-9b7511a0008a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 546.962012] env[62619]: DEBUG nova.compute.manager [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Refreshing instance network info cache due to event network-changed-5c856c2c-a8fe-4788-9891-9b7511a0008a. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 546.962012] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] Acquiring lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 546.963189] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 547.000119] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 547.000390] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 547.000584] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 547.000712] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 547.000854] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 547.003021] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 547.005393] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 547.005580] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 547.007556] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 547.007556] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 547.007556] env[62619]: DEBUG nova.virt.hardware [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 547.007556] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32cac22b-ea44-475e-8721-39c042ab6a98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.016542] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a4645b7-d08d-4af5-b141-e649701b9028 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.237744] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Releasing lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 547.238127] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 547.238317] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 547.241040] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] Acquired lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 547.241040] env[62619]: DEBUG nova.network.neutron [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Refreshing network info cache for port 5c856c2c-a8fe-4788-9891-9b7511a0008a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 547.243638] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bea1239-dff0-46bc-b71d-12568f3c21af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.254975] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30b5d1f8-49f5-460d-bf17-f6e7cd039eb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 547.284936] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1f94cb8f-7773-4330-ab1e-9ccad7585b07 could not be found. [ 547.285238] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 547.285349] env[62619]: INFO nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Took 0.05 seconds to destroy the instance on the hypervisor. [ 547.285610] env[62619]: DEBUG oslo.service.loopingcall [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 547.285812] env[62619]: DEBUG nova.compute.manager [-] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 547.285897] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 547.312968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 547.313797] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 547.317784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.842s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 547.319521] env[62619]: INFO nova.compute.claims [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 547.329551] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.541302] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.542036] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 547.542512] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 547.544646] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 547.803015] env[62619]: DEBUG nova.network.neutron [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 547.825579] env[62619]: DEBUG nova.compute.utils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 547.834430] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 547.834430] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 547.838341] env[62619]: DEBUG nova.network.neutron [-] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.040389] env[62619]: DEBUG nova.network.neutron [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 548.050862] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Skipping network cache update for instance because it is Building. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 548.052933] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 548.053522] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 548.054427] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 548.055671] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 548.055671] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 548.055671] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 548.055671] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.055671] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.055905] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.056145] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.056286] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.056468] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.056665] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 548.057130] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 548.218732] env[62619]: ERROR nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. 
[ 548.218732] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.218732] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.218732] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.218732] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.218732] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.218732] env[62619]: ERROR nova.compute.manager raise self.value [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.218732] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 548.218732] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.218732] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 548.219365] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.219365] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 548.219365] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. 
[ 548.219365] env[62619]: ERROR nova.compute.manager [ 548.219365] env[62619]: Traceback (most recent call last): [ 548.219365] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 548.219365] env[62619]: listener.cb(fileno) [ 548.219365] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.219365] env[62619]: result = function(*args, **kwargs) [ 548.219365] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.219365] env[62619]: return func(*args, **kwargs) [ 548.219365] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.219365] env[62619]: raise e [ 548.219365] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.219365] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 548.219365] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.219365] env[62619]: created_port_ids = self._update_ports_for_instance( [ 548.219365] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.219365] env[62619]: with excutils.save_and_reraise_exception(): [ 548.219365] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.219365] env[62619]: self.force_reraise() [ 548.219365] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.219365] env[62619]: raise self.value [ 548.219365] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.219365] env[62619]: updated_port = self._update_port( [ 548.219365] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.219365] env[62619]: _ensure_no_port_binding_failure(port) [ 548.219365] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.219365] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 548.220093] env[62619]: nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. [ 548.220093] env[62619]: Removing descriptor: 17 [ 548.220093] env[62619]: ERROR nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. 
[ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Traceback (most recent call last): [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] yield resources [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self.driver.spawn(context, instance, image_meta, [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 548.220093] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] vm_ref = self.build_virtual_machine(instance, [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] vif_infos = vmwarevif.get_vif_info(self._session, [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] for vif in network_info: [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return self._sync_wrapper(fn, *args, **kwargs) [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self.wait() [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self[:] = self._gt.wait() [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return self._exit_event.wait() [ 548.220413] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 548.220807] env[62619]: ERROR 
nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] result = hub.switch() [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return self.greenlet.switch() [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] result = function(*args, **kwargs) [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return func(*args, **kwargs) [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] raise e [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] nwinfo = self.network_api.allocate_for_instance( [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 548.220807] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] created_port_ids = self._update_ports_for_instance( [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] with excutils.save_and_reraise_exception(): [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self.force_reraise() [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] raise self.value [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] updated_port = self._update_port( [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 548.221210] 
env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] _ensure_no_port_binding_failure(port) [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 548.221210] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] raise exception.PortBindingFailed(port_id=port['id']) [ 548.221544] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. [ 548.221544] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] [ 548.221544] env[62619]: INFO nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Terminating instance [ 548.227599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquiring lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 548.227599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquired lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 548.227599] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 548.269969] env[62619]: DEBUG nova.policy [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2c3ac7a0e6964893a581a0c2f0b70459', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '701cb2dba8d14b448499613135fb789c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 548.335317] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 548.347761] env[62619]: INFO nova.compute.manager [-] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Took 1.06 seconds to deallocate network for instance. [ 548.348921] env[62619]: DEBUG nova.compute.claims [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 548.348921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.510847] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2812478-a772-4ad6-a53b-9931fca819a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.520556] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a398047d-9d44-4c48-9eb5-08bb942a1e6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.567237] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] Releasing lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 548.567237] env[62619]: DEBUG nova.compute.manager [req-ed65cb97-79d3-46cc-a68d-bc9f4f9a5fd6 req-202cfaba-08ba-4af0-b00b-c46fee76d4c2 service nova] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Received event network-vif-deleted-5c856c2c-a8fe-4788-9891-9b7511a0008a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 548.567237] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 548.567908] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c509708-e0c5-44b5-9a45-ad166acc3951 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.575842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7103b99e-a157-4260-a9f0-682d1960805e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 548.590164] env[62619]: DEBUG nova.compute.provider_tree [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 548.765208] env[62619]: DEBUG nova.network.neutron [None 
req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 548.928923] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 549.093912] env[62619]: DEBUG nova.scheduler.client.report [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 549.351331] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 549.380454] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 549.380454] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 549.380454] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 549.380726] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 549.380726] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 549.381189] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 549.381639] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 549.382018] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 549.382336] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 549.382682] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 549.383115] env[62619]: DEBUG nova.virt.hardware [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 549.385108] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a5d1534-346a-4a10-80d7-bb48f3109303 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.396402] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cbd9fe2-6961-465f-b8c9-1842e7a100fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.399610] env[62619]: ERROR nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. 
[ 549.399610] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.399610] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.399610] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.399610] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.399610] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.399610] env[62619]: ERROR nova.compute.manager raise self.value [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.399610] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 549.399610] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.399610] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 549.400112] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.400112] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 549.400112] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. 
[ 549.400112] env[62619]: ERROR nova.compute.manager [ 549.400112] env[62619]: Traceback (most recent call last): [ 549.400112] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 549.400112] env[62619]: listener.cb(fileno) [ 549.400112] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.400112] env[62619]: result = function(*args, **kwargs) [ 549.400112] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.400112] env[62619]: return func(*args, **kwargs) [ 549.400112] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.400112] env[62619]: raise e [ 549.400112] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.400112] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 549.400112] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.400112] env[62619]: created_port_ids = self._update_ports_for_instance( [ 549.400112] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.400112] env[62619]: with excutils.save_and_reraise_exception(): [ 549.400112] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.400112] env[62619]: self.force_reraise() [ 549.400112] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.400112] env[62619]: raise self.value [ 549.400112] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.400112] env[62619]: updated_port = self._update_port( [ 549.400112] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.400112] env[62619]: _ensure_no_port_binding_failure(port) [ 549.400112] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.400112] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 549.401186] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. [ 549.401186] env[62619]: Removing descriptor: 14 [ 549.401186] env[62619]: ERROR nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. 
[ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Traceback (most recent call last): [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] yield resources [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self.driver.spawn(context, instance, image_meta, [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 549.401186] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] vm_ref = self.build_virtual_machine(instance, [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] vif_infos = vmwarevif.get_vif_info(self._session, [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] for vif in network_info: [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return self._sync_wrapper(fn, *args, **kwargs) [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self.wait() [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self[:] = self._gt.wait() [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return self._exit_event.wait() [ 549.401503] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 549.401822] env[62619]: ERROR 
nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] result = hub.switch() [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return self.greenlet.switch() [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] result = function(*args, **kwargs) [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return func(*args, **kwargs) [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] raise e [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] nwinfo = self.network_api.allocate_for_instance( [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 549.401822] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] created_port_ids = self._update_ports_for_instance( [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] with excutils.save_and_reraise_exception(): [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self.force_reraise() [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] raise self.value [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] updated_port = self._update_port( [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 549.402178] 
env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] _ensure_no_port_binding_failure(port) [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 549.402178] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] raise exception.PortBindingFailed(port_id=port['id']) [ 549.402490] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. [ 549.402490] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] [ 549.402490] env[62619]: INFO nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Terminating instance [ 549.403575] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquiring lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.403575] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquired lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.407998] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 549.432695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Releasing lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 549.432695] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 549.432695] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 549.432695] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6ea122ef-11e4-4ba4-be8d-6f6bead9a6d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.441622] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e91c24e7-6849-44e4-9c88-54473d4cf48d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 549.464095] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0c828d87-be04-47a4-87f9-a0f54622326b could not be found. [ 549.464095] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 549.465355] env[62619]: INFO nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 549.465355] env[62619]: DEBUG oslo.service.loopingcall [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 549.465355] env[62619]: DEBUG nova.compute.manager [-] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 549.465355] env[62619]: DEBUG nova.network.neutron [-] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 549.491463] env[62619]: DEBUG nova.network.neutron [-] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.600739] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.283s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 549.601180] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 549.607046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 549.607140] env[62619]: INFO nova.compute.claims [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 549.702510] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Successfully created port: 49dfc77a-a416-4d6e-814b-bc9408bcdde0 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 549.780107] env[62619]: DEBUG nova.compute.manager [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Received event network-changed-fab98fab-a200-4c3f-b028-89260b05d8c5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 549.780318] env[62619]: DEBUG nova.compute.manager [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Refreshing instance network info cache due to event network-changed-fab98fab-a200-4c3f-b028-89260b05d8c5. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 549.780546] env[62619]: DEBUG oslo_concurrency.lockutils [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] Acquiring lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 549.780703] env[62619]: DEBUG oslo_concurrency.lockutils [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] Acquired lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 549.781192] env[62619]: DEBUG nova.network.neutron [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Refreshing network info cache for port fab98fab-a200-4c3f-b028-89260b05d8c5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 549.977084] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 549.994925] env[62619]: DEBUG nova.network.neutron [-] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.113803] env[62619]: DEBUG nova.compute.utils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 550.125083] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 550.125083] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 550.171682] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.272283] env[62619]: DEBUG nova.policy [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b7f289af8d345949a3c3cba8821c545', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90c788067d2640bfa69e51fb796fffa0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 550.343022] env[62619]: DEBUG nova.network.neutron [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.498142] env[62619]: INFO nova.compute.manager [-] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Took 1.03 seconds to deallocate network for instance. [ 550.500968] env[62619]: DEBUG nova.compute.claims [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 550.501084] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 550.626440] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 550.675793] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Releasing lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 550.676672] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 550.676922] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 550.678399] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22ead00a-eabb-4b2c-9a07-e57d767e93fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.689638] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-943bcc4d-3692-44d7-a06e-ee6be6a317f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.717245] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72 could not be found. [ 550.717599] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 550.717852] env[62619]: INFO nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Took 0.04 seconds to destroy the instance on the hypervisor. [ 550.718163] env[62619]: DEBUG oslo.service.loopingcall [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 550.721307] env[62619]: DEBUG nova.compute.manager [-] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 550.721475] env[62619]: DEBUG nova.network.neutron [-] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 550.728702] env[62619]: DEBUG nova.network.neutron [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 550.784078] env[62619]: DEBUG nova.network.neutron [-] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 550.799172] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37f5bdf3-ed00-4dfd-9779-cb7dd7b20505 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.807629] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45860080-b6c3-403a-94a6-9dfd55b746e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.843317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30f3159-91df-43a9-8fdf-9c5b863314d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.851023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735bb33b-2dda-46ed-aecb-a4fe7db3c2e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 550.872264] env[62619]: DEBUG nova.compute.provider_tree [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 551.230668] env[62619]: DEBUG oslo_concurrency.lockutils [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] Releasing lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 551.230668] env[62619]: DEBUG nova.compute.manager [req-036f6944-d3eb-4ec3-959c-44ab6a2477bb req-1182c8af-a3b6-42bc-b157-73e06bbddf24 service nova] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Received event network-vif-deleted-fab98fab-a200-4c3f-b028-89260b05d8c5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 551.289886] env[62619]: DEBUG nova.network.neutron [-] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 551.313215] env[62619]: DEBUG 
nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Successfully created port: 289bd473-7ed6-482d-9825-dc74f2cb5379 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 551.379112] env[62619]: DEBUG nova.scheduler.client.report [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 551.639469] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 551.682605] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 551.682605] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 551.682605] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 551.682945] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 551.682945] env[62619]: DEBUG nova.virt.hardware [None 
req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 551.682945] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 551.682945] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 551.684235] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 551.684510] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 551.684688] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 551.684859] env[62619]: DEBUG nova.virt.hardware [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 551.685779] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c32397c-0b36-49dd-bc13-f3e1d148875b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.697423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquiring lock "98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.697423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Lock "98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 551.710709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-143ad3b7-3c84-49f8-925d-0dbf2a8bca2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 551.791721] env[62619]: INFO nova.compute.manager [-] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Took 1.07 seconds to deallocate network for instance. [ 551.795691] env[62619]: DEBUG nova.compute.claims [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 551.796279] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 551.884999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.279s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 551.884999] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 551.888383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 6.353s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.036638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquiring lock "72997052-6221-4a7b-abb7-07a7ce87bf3c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.036915] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Lock "72997052-6221-4a7b-abb7-07a7ce87bf3c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 552.207155] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 552.393062] env[62619]: DEBUG nova.compute.utils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 552.399533] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 552.404019] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 552.541192] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 552.586250] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a72c041c-4842-472a-b1ae-3b50ab8ed197 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.597672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ca97b30-22b7-4206-8620-964bd623d401 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.632040] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bfa6a2e-c498-40d2-a47a-ed9c14b51718 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.641609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8222928-09f0-4f51-ab93-a567d5279bcd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 552.657825] env[62619]: DEBUG nova.compute.provider_tree [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 552.689278] env[62619]: DEBUG nova.policy [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '028f86039da04407a7ad3f8f72082010', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fa0d16c7ccd643e6b35bcc19aeff0050', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 552.728781] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 552.756105] env[62619]: ERROR nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. 
[ 552.756105] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.756105] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.756105] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.756105] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.756105] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.756105] env[62619]: ERROR nova.compute.manager raise self.value [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.756105] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 552.756105] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.756105] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 552.757755] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.757755] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 552.757755] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. 
[ 552.757755] env[62619]: ERROR nova.compute.manager [ 552.757755] env[62619]: Traceback (most recent call last): [ 552.757755] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 552.757755] env[62619]: listener.cb(fileno) [ 552.757755] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.757755] env[62619]: result = function(*args, **kwargs) [ 552.757755] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.757755] env[62619]: return func(*args, **kwargs) [ 552.757755] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.757755] env[62619]: raise e [ 552.757755] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.757755] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 552.757755] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.757755] env[62619]: created_port_ids = self._update_ports_for_instance( [ 552.757755] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.757755] env[62619]: with excutils.save_and_reraise_exception(): [ 552.757755] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.757755] env[62619]: self.force_reraise() [ 552.757755] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.757755] env[62619]: raise self.value [ 552.757755] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.757755] env[62619]: updated_port = self._update_port( [ 552.757755] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.757755] env[62619]: _ensure_no_port_binding_failure(port) [ 552.757755] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.757755] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 552.758618] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. [ 552.758618] env[62619]: Removing descriptor: 18 [ 552.758618] env[62619]: ERROR nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. 
[ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Traceback (most recent call last): [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] yield resources [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self.driver.spawn(context, instance, image_meta, [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 552.758618] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] vm_ref = self.build_virtual_machine(instance, [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] vif_infos = vmwarevif.get_vif_info(self._session, [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] for vif in network_info: [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return self._sync_wrapper(fn, *args, **kwargs) [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self.wait() [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self[:] = self._gt.wait() [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return self._exit_event.wait() [ 552.759025] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 552.759453] env[62619]: ERROR 
nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] result = hub.switch() [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return self.greenlet.switch() [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] result = function(*args, **kwargs) [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return func(*args, **kwargs) [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] raise e [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] nwinfo = self.network_api.allocate_for_instance( [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 552.759453] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] created_port_ids = self._update_ports_for_instance( [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] with excutils.save_and_reraise_exception(): [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self.force_reraise() [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] raise self.value [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] updated_port = self._update_port( [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 552.759846] 
env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] _ensure_no_port_binding_failure(port) [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 552.759846] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] raise exception.PortBindingFailed(port_id=port['id']) [ 552.760183] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. [ 552.760183] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] [ 552.760183] env[62619]: INFO nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Terminating instance [ 552.762056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquiring lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 552.762056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquired lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 552.762056] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 552.896117] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 553.072141] env[62619]: DEBUG nova.compute.manager [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Received event network-changed-8623168c-a37b-474f-8a2c-e4312275c1c9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 553.072141] env[62619]: DEBUG nova.compute.manager [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Refreshing instance network info cache due to event network-changed-8623168c-a37b-474f-8a2c-e4312275c1c9. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 553.072141] env[62619]: DEBUG oslo_concurrency.lockutils [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] Acquiring lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.072141] env[62619]: DEBUG oslo_concurrency.lockutils [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] Acquired lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.072141] env[62619]: DEBUG nova.network.neutron [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Refreshing network info cache for port 8623168c-a37b-474f-8a2c-e4312275c1c9 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 553.076447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 553.160887] env[62619]: DEBUG nova.scheduler.client.report [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 553.310413] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.513606] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.605116] env[62619]: DEBUG nova.network.neutron [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 553.673110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.784s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 553.673110] env[62619]: ERROR nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Traceback (most recent call last): [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self.driver.spawn(context, instance, image_meta, [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 553.673110] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] vm_ref = self.build_virtual_machine(instance, [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] vif_infos = vmwarevif.get_vif_info(self._session, [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] for vif in network_info: [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return self._sync_wrapper(fn, *args, **kwargs) [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self.wait() [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 553.673782] env[62619]: ERROR nova.compute.manager 
[instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self[:] = self._gt.wait() [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return self._exit_event.wait() [ 553.673782] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] result = hub.switch() [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return self.greenlet.switch() [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] result = function(*args, **kwargs) [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] return func(*args, **kwargs) [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] raise e [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] nwinfo = self.network_api.allocate_for_instance( [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 553.674245] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] created_port_ids = self._update_ports_for_instance( [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] with excutils.save_and_reraise_exception(): [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] self.force_reraise() [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] raise self.value [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] updated_port = self._update_port( [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] _ensure_no_port_binding_failure(port) [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 553.674689] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] raise exception.PortBindingFailed(port_id=port['id']) [ 553.675745] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] nova.exception.PortBindingFailed: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. [ 553.675745] env[62619]: ERROR nova.compute.manager [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] [ 553.675745] env[62619]: DEBUG nova.compute.utils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 553.675745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.909s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 553.678239] env[62619]: INFO nova.compute.claims [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 553.688091] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Build of instance f6d9ce5b-d610-4573-b43b-21836f7f8a1b was re-scheduled: Binding failed for port a14ec30f-de96-4fd7-b218-8c0fb823620c, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 553.688643] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 553.688869] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquiring lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 553.689066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Acquired lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 553.689159] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 553.705195] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Successfully created port: 4f3424b7-9612-47ac-abc8-a9245b1cf289 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 553.764861] env[62619]: DEBUG nova.network.neutron [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 553.911129] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 553.942868] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 553.943866] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 553.943866] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 553.943866] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 553.943866] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 553.943866] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 553.943866] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 553.944320] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 553.944320] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 
tempest-ServersTestJSON-1717828494-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 553.944385] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 553.945520] env[62619]: DEBUG nova.virt.hardware [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 553.945520] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0681c603-3f94-4c57-b37e-2f242b05cb57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 553.957350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a83f4f9d-9152-421a-9213-cce26a6f1f78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.018061] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Releasing lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.018851] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 554.018851] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 554.019141] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-699e4583-084d-4a9f-b1e1-643bcde7738d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.032331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01c6545d-a6f1-43be-b475-a26949cef131 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.055338] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 88346467-e1ac-4647-bcf7-063636853a2b could not be found. 
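The nova.virt.hardware entries above trace how a 1-vCPU flavor (m1.nano) with no explicit CPU-topology constraints collapses to a single VirtCPUTopology(cores=1,sockets=1,threads=1): per-dimension limits default to 65536, preferences to 0, and the candidate (sockets, cores, threads) combinations whose product matches the vCPU count are enumerated and sorted. The sketch below illustrates only that enumeration step under those assumptions; the Topology class, function name, and defaults are illustrative stand-ins, not Nova's nova.virt.hardware API.

# Illustrative sketch only: enumerate CPU topologies for a flavor with no
# explicit topology constraints, in the spirit of the log entries above
# (limits default to 65536 per dimension; a 1-vCPU flavor yields exactly
# one 1:1:1 topology). Names and defaults are assumptions, not Nova code.
from dataclasses import dataclass


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus: int, max_sockets: int = 65536,
                        max_cores: int = 65536, max_threads: int = 65536):
    """Yield every (sockets, cores, threads) combination whose product
    equals the vCPU count and respects the per-dimension maximums."""
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        for cores in range(1, min(vcpus, max_cores) + 1):
            for threads in range(1, min(vcpus, max_threads) + 1):
                if sockets * cores * threads == vcpus:
                    yield Topology(sockets, cores, threads)


if __name__ == "__main__":
    # For a 1-vCPU flavor such as m1.nano there is exactly one option.
    print(list(possible_topologies(1)))
    # -> [Topology(sockets=1, cores=1, threads=1)]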
[ 554.055572] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 554.055749] env[62619]: INFO nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 554.055990] env[62619]: DEBUG oslo.service.loopingcall [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 554.056671] env[62619]: DEBUG nova.compute.manager [-] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 554.056797] env[62619]: DEBUG nova.network.neutron [-] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 554.099459] env[62619]: DEBUG nova.network.neutron [-] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.114239] env[62619]: DEBUG nova.compute.manager [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Received event network-changed-49dfc77a-a416-4d6e-814b-bc9408bcdde0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.115482] env[62619]: DEBUG nova.compute.manager [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Refreshing instance network info cache due to event network-changed-49dfc77a-a416-4d6e-814b-bc9408bcdde0. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 554.115853] env[62619]: DEBUG oslo_concurrency.lockutils [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] Acquiring lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.115917] env[62619]: DEBUG oslo_concurrency.lockutils [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] Acquired lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.116230] env[62619]: DEBUG nova.network.neutron [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Refreshing network info cache for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 554.220144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquiring lock "748a4c26-6df7-4a4c-b81a-0b3e59a8b936" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.220144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Lock "748a4c26-6df7-4a4c-b81a-0b3e59a8b936" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.229631] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.267492] env[62619]: DEBUG oslo_concurrency.lockutils [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] Releasing lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 554.267769] env[62619]: DEBUG nova.compute.manager [req-47139253-ea6c-4297-b558-4b40907a9333 req-1c3bd3ce-add0-42a3-87e8-e439af254501 service nova] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Received event network-vif-deleted-8623168c-a37b-474f-8a2c-e4312275c1c9 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 554.510879] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.520346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquiring lock "5e419be8-cfb9-4819-8e92-873daa313d7a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 554.520470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Lock "5e419be8-cfb9-4819-8e92-873daa313d7a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 554.601305] env[62619]: DEBUG nova.network.neutron [-] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.624177] env[62619]: ERROR nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. 
[ 554.624177] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 554.624177] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.624177] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.624177] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.624177] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.624177] env[62619]: ERROR nova.compute.manager raise self.value [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.624177] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 554.624177] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.624177] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 554.624670] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.624670] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 554.624670] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. 
[ 554.624670] env[62619]: ERROR nova.compute.manager [ 554.624670] env[62619]: Traceback (most recent call last): [ 554.624670] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 554.624670] env[62619]: listener.cb(fileno) [ 554.624670] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.624670] env[62619]: result = function(*args, **kwargs) [ 554.624670] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 554.624670] env[62619]: return func(*args, **kwargs) [ 554.624670] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 554.624670] env[62619]: raise e [ 554.624670] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 554.624670] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 554.624670] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.624670] env[62619]: created_port_ids = self._update_ports_for_instance( [ 554.624670] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.624670] env[62619]: with excutils.save_and_reraise_exception(): [ 554.624670] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.624670] env[62619]: self.force_reraise() [ 554.624670] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.624670] env[62619]: raise self.value [ 554.624670] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.624670] env[62619]: updated_port = self._update_port( [ 554.624670] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.624670] env[62619]: _ensure_no_port_binding_failure(port) [ 554.624670] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.624670] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 554.625505] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. [ 554.625505] env[62619]: Removing descriptor: 17 [ 554.625505] env[62619]: ERROR nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. 
[ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Traceback (most recent call last): [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] yield resources [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self.driver.spawn(context, instance, image_meta, [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 554.625505] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] vm_ref = self.build_virtual_machine(instance, [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] vif_infos = vmwarevif.get_vif_info(self._session, [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] for vif in network_info: [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return self._sync_wrapper(fn, *args, **kwargs) [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self.wait() [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self[:] = self._gt.wait() [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return self._exit_event.wait() [ 554.625870] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 554.626343] env[62619]: ERROR 
nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] result = hub.switch() [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return self.greenlet.switch() [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] result = function(*args, **kwargs) [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return func(*args, **kwargs) [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] raise e [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] nwinfo = self.network_api.allocate_for_instance( [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 554.626343] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] created_port_ids = self._update_ports_for_instance( [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] with excutils.save_and_reraise_exception(): [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self.force_reraise() [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] raise self.value [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] updated_port = self._update_port( [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 554.626688] 
env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] _ensure_no_port_binding_failure(port) [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 554.626688] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] raise exception.PortBindingFailed(port_id=port['id']) [ 554.626980] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. [ 554.626980] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] [ 554.626980] env[62619]: INFO nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Terminating instance [ 554.629316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 554.629316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquired lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 554.629316] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 554.651817] env[62619]: DEBUG nova.network.neutron [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 554.938464] env[62619]: DEBUG nova.network.neutron [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 554.972702] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d80a9c2-67a9-4a02-a6c6-3e76c759083d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 554.987121] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01cf2ab6-bcc7-475b-b76d-9336d2608815 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.030132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Releasing lock "refresh_cache-f6d9ce5b-d610-4573-b43b-21836f7f8a1b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.030132] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 555.030132] env[62619]: DEBUG nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 555.030132] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.031881] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183383b7-dd16-46dc-9034-be392efaff63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.042254] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78cf5318-7b76-4b04-857c-2dbe55c80cdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.057457] env[62619]: DEBUG nova.compute.provider_tree [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 555.085232] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 
tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.104184] env[62619]: INFO nova.compute.manager [-] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Took 1.05 seconds to deallocate network for instance. [ 555.107540] env[62619]: DEBUG nova.compute.claims [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 555.107731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.159179] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 555.378026] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.444516] env[62619]: DEBUG oslo_concurrency.lockutils [req-49838481-30b4-4c61-b6e4-8b9083cea12a req-82665e9a-8de7-4d80-849d-10064e3a02e6 service nova] Releasing lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.467059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquiring lock "89638fb3-c5f4-48a4-948b-fb6220ed1dca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.467207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Lock "89638fb3-c5f4-48a4-948b-fb6220ed1dca" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.561620] env[62619]: DEBUG nova.scheduler.client.report [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 
'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 555.587542] env[62619]: DEBUG nova.network.neutron [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 555.742875] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "79a94ed1-1139-4194-8091-00b7b1562330" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 555.743596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "79a94ed1-1139-4194-8091-00b7b1562330" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 555.881545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Releasing lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 555.881871] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 555.882088] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 555.882387] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ff832792-e452-40c2-8c87-de6a92d51679 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.895133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8dd999e-3283-428a-9a2f-36718d78d57e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 555.921864] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4 could not be found. [ 555.922103] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 555.922322] env[62619]: INFO nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 555.922527] env[62619]: DEBUG oslo.service.loopingcall [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 555.922778] env[62619]: DEBUG nova.compute.manager [-] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 555.923186] env[62619]: DEBUG nova.network.neutron [-] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 555.955654] env[62619]: DEBUG nova.network.neutron [-] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 556.070466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.395s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 556.070987] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 556.074203] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.725s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 556.092496] env[62619]: INFO nova.compute.manager [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] [instance: f6d9ce5b-d610-4573-b43b-21836f7f8a1b] Took 1.06 seconds to deallocate network for instance. [ 556.459359] env[62619]: DEBUG nova.network.neutron [-] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 556.587065] env[62619]: DEBUG nova.compute.utils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 556.590719] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 556.590882] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 556.724726] env[62619]: DEBUG nova.policy [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '101927e87ca54ff3a8387f40c8790dd2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd777322bcf3a442a8c473f8fca2d9129', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 556.730870] env[62619]: DEBUG nova.compute.manager [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Received event network-changed-289bd473-7ed6-482d-9825-dc74f2cb5379 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 556.730870] env[62619]: DEBUG nova.compute.manager [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Refreshing instance network info cache due to event network-changed-289bd473-7ed6-482d-9825-dc74f2cb5379. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 556.730870] env[62619]: DEBUG oslo_concurrency.lockutils [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] Acquiring lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.730870] env[62619]: DEBUG oslo_concurrency.lockutils [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] Acquired lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.731065] env[62619]: DEBUG nova.network.neutron [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Refreshing network info cache for port 289bd473-7ed6-482d-9825-dc74f2cb5379 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 556.767051] env[62619]: ERROR nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. 
[ 556.767051] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.767051] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.767051] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.767051] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.767051] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.767051] env[62619]: ERROR nova.compute.manager raise self.value [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.767051] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 556.767051] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.767051] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 556.767536] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.767536] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 556.767536] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. 
[ 556.767536] env[62619]: ERROR nova.compute.manager [ 556.767536] env[62619]: Traceback (most recent call last): [ 556.767536] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 556.767536] env[62619]: listener.cb(fileno) [ 556.767536] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.767536] env[62619]: result = function(*args, **kwargs) [ 556.767536] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.767536] env[62619]: return func(*args, **kwargs) [ 556.767536] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.767536] env[62619]: raise e [ 556.767536] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.767536] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 556.767536] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.767536] env[62619]: created_port_ids = self._update_ports_for_instance( [ 556.767536] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.767536] env[62619]: with excutils.save_and_reraise_exception(): [ 556.767536] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.767536] env[62619]: self.force_reraise() [ 556.767536] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.767536] env[62619]: raise self.value [ 556.767536] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.767536] env[62619]: updated_port = self._update_port( [ 556.767536] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.767536] env[62619]: _ensure_no_port_binding_failure(port) [ 556.767536] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.767536] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 556.768336] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. [ 556.768336] env[62619]: Removing descriptor: 16 [ 556.768336] env[62619]: ERROR nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. 
[ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Traceback (most recent call last): [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] yield resources [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self.driver.spawn(context, instance, image_meta, [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self._vmops.spawn(context, instance, image_meta, injected_files, [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 556.768336] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] vm_ref = self.build_virtual_machine(instance, [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] vif_infos = vmwarevif.get_vif_info(self._session, [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] for vif in network_info: [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return self._sync_wrapper(fn, *args, **kwargs) [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self.wait() [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self[:] = self._gt.wait() [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return self._exit_event.wait() [ 556.768632] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 556.768960] env[62619]: ERROR 
nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] result = hub.switch() [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return self.greenlet.switch() [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] result = function(*args, **kwargs) [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return func(*args, **kwargs) [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] raise e [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] nwinfo = self.network_api.allocate_for_instance( [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 556.768960] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] created_port_ids = self._update_ports_for_instance( [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] with excutils.save_and_reraise_exception(): [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self.force_reraise() [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] raise self.value [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] updated_port = self._update_port( [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 556.769335] 
env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] _ensure_no_port_binding_failure(port) [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 556.769335] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] raise exception.PortBindingFailed(port_id=port['id']) [ 556.769750] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. [ 556.769750] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] [ 556.769750] env[62619]: INFO nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Terminating instance [ 556.770888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquiring lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 556.770888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquired lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 556.770970] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 556.840159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed872b2a-6cc8-4dbc-9e7e-70df89990521 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.847736] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b646dec-88d9-4432-9257-3c820f4f7f69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.881471] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ca36f5-ea80-48f6-818e-7a76e82ff94e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.889140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc4ce35-a098-4d57-9522-f34a0fe19d39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 556.905847] env[62619]: DEBUG nova.compute.provider_tree [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed in ProviderTree for provider: 
c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 556.966556] env[62619]: INFO nova.compute.manager [-] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Took 1.04 seconds to deallocate network for instance. [ 556.969797] env[62619]: DEBUG nova.compute.claims [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 556.969916] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.054394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquiring lock "d22ac031-3202-457c-8bb7-d557ad0fe9f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 557.054499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Lock "d22ac031-3202-457c-8bb7-d557ad0fe9f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.094224] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 557.118963] env[62619]: DEBUG nova.compute.manager [req-69f21a0b-65a6-44df-ba29-4ee76d01595d req-9d385e73-9ca9-4373-b7cd-445dab5b1268 service nova] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Received event network-vif-deleted-49dfc77a-a416-4d6e-814b-bc9408bcdde0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 557.155217] env[62619]: INFO nova.scheduler.client.report [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Deleted allocations for instance f6d9ce5b-d610-4573-b43b-21836f7f8a1b [ 557.315030] env[62619]: DEBUG nova.network.neutron [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.338865] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 557.411532] env[62619]: DEBUG nova.scheduler.client.report [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 557.588601] env[62619]: DEBUG nova.network.neutron [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.602129] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 557.666264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9f7ef537-e592-4323-89f3-aea5609e923d tempest-TenantUsagesTestJSON-609128681 tempest-TenantUsagesTestJSON-609128681-project-member] Lock "f6d9ce5b-d610-4573-b43b-21836f7f8a1b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.328s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.841248] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Successfully created port: f9543b24-561b-4d98-b029-b3a6502c955b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 557.927766] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.853s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.928515] env[62619]: ERROR nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Failed to build and run instance: 
nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Traceback (most recent call last): [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self.driver.spawn(context, instance, image_meta, [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self._vmops.spawn(context, instance, image_meta, injected_files, [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] vm_ref = self.build_virtual_machine(instance, [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] vif_infos = vmwarevif.get_vif_info(self._session, [ 557.928515] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] for vif in network_info: [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return self._sync_wrapper(fn, *args, **kwargs) [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self.wait() [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self[:] = self._gt.wait() [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return self._exit_event.wait() [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] result = hub.switch() [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 
1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 557.928861] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return self.greenlet.switch() [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] result = function(*args, **kwargs) [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] return func(*args, **kwargs) [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] raise e [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] nwinfo = self.network_api.allocate_for_instance( [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] created_port_ids = self._update_ports_for_instance( [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] with excutils.save_and_reraise_exception(): [ 557.929348] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] self.force_reraise() [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] raise self.value [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] updated_port = self._update_port( [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] _ensure_no_port_binding_failure(port) [ 557.929725] env[62619]: ERROR 
nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] raise exception.PortBindingFailed(port_id=port['id']) [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] nova.exception.PortBindingFailed: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. [ 557.929725] env[62619]: ERROR nova.compute.manager [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] [ 557.930071] env[62619]: DEBUG nova.compute.utils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 557.930756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 9.364s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.930756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 557.930756] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 557.931211] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 7.430s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 557.934558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b3f1f2-05a9-4698-a10c-8e3e306ca5ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.937833] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Build of instance 1f94cb8f-7773-4330-ab1e-9ccad7585b07 was re-scheduled: Binding failed for port 5c856c2c-a8fe-4788-9891-9b7511a0008a, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 557.938266] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 557.938508] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 557.938650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquired lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 557.938813] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 557.946565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0900b6b-6e0e-40c4-8716-54ebeb7f88c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.965887] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de46e5d-c9ad-4654-a4d6-cc01e55ee903 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 557.974175] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb9d616-9257-4803-9af7-08e077f98c7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.007724] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181426MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 558.007927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.095692] env[62619]: DEBUG oslo_concurrency.lockutils [req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] Releasing lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.095692] env[62619]: DEBUG nova.compute.manager 
[req-30bdf8bf-94a7-4a8c-a3df-0b417756bbd3 req-6acefb4d-4f12-45bb-aec8-5e0f5f6a24b3 service nova] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Received event network-vif-deleted-289bd473-7ed6-482d-9825-dc74f2cb5379 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 558.113992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Releasing lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 558.113992] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 558.113992] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 558.113992] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 558.118119] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5848fd1b-2b7e-462b-822a-05d215c067e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.131842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6f66626-a675-4417-a7bf-9ef1f08bef0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.157916] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 558.158250] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 
tempest-ServersTestManualDisk-1177164087-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 558.158365] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 558.158555] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 558.158699] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 558.158900] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 558.160766] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 558.160937] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 558.160985] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 558.161808] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 558.162024] env[62619]: DEBUG nova.virt.hardware [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 558.162949] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f09b97b9-f656-4f3f-b9c1-e3e56b83c9d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.173293] env[62619]: DEBUG nova.compute.manager [None 
req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 558.176859] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 67cf14dd-309a-4abb-b8e0-bc289c7cc845 could not be found. [ 558.177206] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 558.177248] env[62619]: INFO nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Took 0.06 seconds to destroy the instance on the hypervisor. [ 558.177551] env[62619]: DEBUG oslo.service.loopingcall [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 558.179941] env[62619]: DEBUG nova.compute.manager [-] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 558.180055] env[62619]: DEBUG nova.network.neutron [-] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 558.192756] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c560815-7362-4a17-b20c-5e6589ba2df9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.211276] env[62619]: DEBUG nova.network.neutron [-] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.496212] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 558.652938] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.714864] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 558.715022] env[62619]: DEBUG nova.network.neutron [-] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 558.755282] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf178568-502a-4c50-a9ce-3d517ad93ad7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.763784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22089855-790a-4442-82f6-9ea826cb57d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.799545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9538130a-7b45-4d1f-8270-a489406bd90b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.810209] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2c08e91-0808-49ce-bc99-01c947426d4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 558.828167] env[62619]: DEBUG nova.compute.provider_tree [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 559.155374] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Releasing lock "refresh_cache-1f94cb8f-7773-4330-ab1e-9ccad7585b07" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 559.156341] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 559.156341] env[62619]: DEBUG nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 559.156341] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 559.217456] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 559.219638] env[62619]: INFO nova.compute.manager [-] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Took 1.04 seconds to deallocate network for instance. [ 559.223078] env[62619]: DEBUG nova.compute.claims [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 559.223078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 559.332970] env[62619]: DEBUG nova.scheduler.client.report [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 559.722529] env[62619]: DEBUG nova.network.neutron [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 559.839481] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.907s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 559.839481] env[62619]: ERROR nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Traceback (most recent call last): [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self.driver.spawn(context, instance, image_meta, [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 559.839481] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] vm_ref = self.build_virtual_machine(instance, [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] vif_infos = vmwarevif.get_vif_info(self._session, [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] for vif in network_info: [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return self._sync_wrapper(fn, *args, **kwargs) [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self.wait() [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self[:] = self._gt.wait() [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 559.839799] env[62619]: ERROR 
nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return self._exit_event.wait() [ 559.839799] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] result = hub.switch() [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return self.greenlet.switch() [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] result = function(*args, **kwargs) [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] return func(*args, **kwargs) [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] raise e [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] nwinfo = self.network_api.allocate_for_instance( [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 559.840329] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] created_port_ids = self._update_ports_for_instance( [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] with excutils.save_and_reraise_exception(): [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] self.force_reraise() [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] raise self.value [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance 
[ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] updated_port = self._update_port( [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] _ensure_no_port_binding_failure(port) [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 559.840676] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] raise exception.PortBindingFailed(port_id=port['id']) [ 559.840965] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] nova.exception.PortBindingFailed: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. [ 559.840965] env[62619]: ERROR nova.compute.manager [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] [ 559.840965] env[62619]: DEBUG nova.compute.utils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 559.841261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 8.045s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 559.845769] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Build of instance 0c828d87-be04-47a4-87f9-a0f54622326b was re-scheduled: Binding failed for port fab98fab-a200-4c3f-b028-89260b05d8c5, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 559.846237] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 559.846830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquiring lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 559.846830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Acquired lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 559.846830] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.226140] env[62619]: INFO nova.compute.manager [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 1f94cb8f-7773-4330-ab1e-9ccad7585b07] Took 1.07 seconds to deallocate network for instance. [ 560.405755] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 560.615505] env[62619]: ERROR nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. 
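Note: the section repeats the same failure pattern for several tempest-created instances (ports fab98fab-..., f9543b24-..., 8623168c-...): port binding fails, the instance is re-scheduled, its network is deallocated and the resource claim is aborted; the full traceback for the record above follows below. When triaging a log like this it can help to pull out the failed port IDs and affected instance UUIDs first. A convenience sketch (Python); the log file name and the exact "Binding failed for port <uuid>" message format are assumptions taken from the lines shown here, not a Nova API:

#!/usr/bin/env python3
"""Summarise PortBindingFailed occurrences in a nova-compute log (sketch)."""
import re
from collections import Counter

# Port UUIDs as they appear in the "Binding failed for port ..." message.
PORT_RE = re.compile(r"Binding failed for port ([0-9a-f-]{36})")
# Instance UUIDs as they appear in the "[instance: ...]" prefix.
INSTANCE_RE = re.compile(r"\[instance: ([0-9a-f-]{36})\]")


def summarise(path="nova-compute.log"):
    ports, instances = Counter(), Counter()
    with open(path, errors="replace") as fh:
        for line in fh:
            m = PORT_RE.search(line)
            if not m:
                continue
            ports[m.group(1)] += 1
            inst = INSTANCE_RE.search(line)
            if inst:
                instances[inst.group(1)] += 1
    return ports, instances


if __name__ == "__main__":
    ports, instances = summarise()
    for port, count in ports.most_common():
        print(f"port {port}: {count} line(s)")
    for inst, count in instances.most_common():
        print(f"instance {inst}: {count} line(s)")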
[ 560.615505] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.615505] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.615505] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.615505] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.615505] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.615505] env[62619]: ERROR nova.compute.manager raise self.value [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.615505] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 560.615505] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.615505] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 560.615973] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.615973] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 560.615973] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. 
[ 560.615973] env[62619]: ERROR nova.compute.manager [ 560.615973] env[62619]: Traceback (most recent call last): [ 560.615973] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 560.615973] env[62619]: listener.cb(fileno) [ 560.615973] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.615973] env[62619]: result = function(*args, **kwargs) [ 560.615973] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.615973] env[62619]: return func(*args, **kwargs) [ 560.615973] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.615973] env[62619]: raise e [ 560.615973] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.615973] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 560.615973] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.615973] env[62619]: created_port_ids = self._update_ports_for_instance( [ 560.615973] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.615973] env[62619]: with excutils.save_and_reraise_exception(): [ 560.615973] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.615973] env[62619]: self.force_reraise() [ 560.615973] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.615973] env[62619]: raise self.value [ 560.615973] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.615973] env[62619]: updated_port = self._update_port( [ 560.615973] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.615973] env[62619]: _ensure_no_port_binding_failure(port) [ 560.615973] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.615973] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 560.616797] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. [ 560.616797] env[62619]: Removing descriptor: 18 [ 560.616797] env[62619]: ERROR nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. 
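Note: every traceback in this section bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py, line 294 in this checkout) raising PortBindingFailed after Neutron reports the port's binding as failed. A minimal, self-contained sketch of that check (Python); the PortBindingFailed stand-in class and the 'binding_failed' sentinel for binding:vif_type are assumptions for illustration, not a verbatim copy of Nova's code:

class PortBindingFailed(Exception):
    # Stand-in for nova.exception.PortBindingFailed.
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, please check neutron "
            "logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron marks a port whose binding the backend could not complete;
    # Nova turns that into the exception seen throughout this log.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


if __name__ == "__main__":
    # Example: a port dict as returned by the Neutron API (fields trimmed).
    failed_port = {'id': 'fab98fab-a200-4c3f-b028-89260b05d8c5',
                   'binding:vif_type': 'binding_failed'}
    try:
        ensure_no_port_binding_failure(failed_port)
    except PortBindingFailed as exc:
        print(exc)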
[ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Traceback (most recent call last): [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] yield resources [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self.driver.spawn(context, instance, image_meta, [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 560.616797] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] vm_ref = self.build_virtual_machine(instance, [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] for vif in network_info: [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return self._sync_wrapper(fn, *args, **kwargs) [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self.wait() [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self[:] = self._gt.wait() [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return self._exit_event.wait() [ 560.617160] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 560.617513] env[62619]: ERROR 
nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] result = hub.switch() [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return self.greenlet.switch() [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] result = function(*args, **kwargs) [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return func(*args, **kwargs) [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] raise e [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] nwinfo = self.network_api.allocate_for_instance( [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 560.617513] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] created_port_ids = self._update_ports_for_instance( [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] with excutils.save_and_reraise_exception(): [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self.force_reraise() [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] raise self.value [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] updated_port = self._update_port( [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 560.617861] 
env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] _ensure_no_port_binding_failure(port) [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 560.617861] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] raise exception.PortBindingFailed(port_id=port['id']) [ 560.618208] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. [ 560.618208] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] [ 560.618208] env[62619]: INFO nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Terminating instance [ 560.623083] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 560.628208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquiring lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.628407] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquired lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.628605] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 560.646632] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f578c99-5717-44c4-9aa2-749f7615f786 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.655881] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3835323d-2195-49e4-b199-a7b3c1096ed2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.695487] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5af2a112-2074-41ef-9173-7c47ab250c6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.699268] env[62619]: DEBUG nova.compute.manager [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 
req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Received event network-changed-4f3424b7-9612-47ac-abc8-a9245b1cf289 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 560.699470] env[62619]: DEBUG nova.compute.manager [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Refreshing instance network info cache due to event network-changed-4f3424b7-9612-47ac-abc8-a9245b1cf289. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 560.699949] env[62619]: DEBUG oslo_concurrency.lockutils [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] Acquiring lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 560.699949] env[62619]: DEBUG oslo_concurrency.lockutils [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] Acquired lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 560.699949] env[62619]: DEBUG nova.network.neutron [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Refreshing network info cache for port 4f3424b7-9612-47ac-abc8-a9245b1cf289 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 560.709873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10df03fd-31f6-4535-bed2-bc6c85a210bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 560.725263] env[62619]: DEBUG nova.compute.provider_tree [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 561.131176] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Releasing lock "refresh_cache-0c828d87-be04-47a4-87f9-a0f54622326b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.131443] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 561.131595] env[62619]: DEBUG nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 561.131766] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 561.151865] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.172736] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.231133] env[62619]: DEBUG nova.scheduler.client.report [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 561.242725] env[62619]: DEBUG nova.network.neutron [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.266786] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.269938] env[62619]: INFO nova.scheduler.client.report [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Deleted allocations for instance 1f94cb8f-7773-4330-ab1e-9ccad7585b07 [ 561.428896] env[62619]: DEBUG nova.network.neutron [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.678196] env[62619]: DEBUG nova.network.neutron [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 561.700371] env[62619]: DEBUG nova.compute.manager [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Received event network-changed-f9543b24-561b-4d98-b029-b3a6502c955b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 561.700371] env[62619]: DEBUG nova.compute.manager [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Refreshing instance network info cache due to event network-changed-f9543b24-561b-4d98-b029-b3a6502c955b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 561.700371] env[62619]: DEBUG oslo_concurrency.lockutils [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] Acquiring lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.735539] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.893s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.735539] env[62619]: ERROR nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. 
[ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Traceback (most recent call last): [ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self.driver.spawn(context, instance, image_meta, [ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self._vmops.spawn(context, instance, image_meta, injected_files, [ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 561.735539] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] vm_ref = self.build_virtual_machine(instance, [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] vif_infos = vmwarevif.get_vif_info(self._session, [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] for vif in network_info: [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return self._sync_wrapper(fn, *args, **kwargs) [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self.wait() [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self[:] = self._gt.wait() [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return self._exit_event.wait() [ 561.735966] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] result = hub.switch() [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return self.greenlet.switch() [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] result = function(*args, **kwargs) [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] return func(*args, **kwargs) [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] raise e [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] nwinfo = self.network_api.allocate_for_instance( [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 561.736307] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] created_port_ids = self._update_ports_for_instance( [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] with excutils.save_and_reraise_exception(): [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] self.force_reraise() [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] raise self.value [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] updated_port = self._update_port( [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] _ensure_no_port_binding_failure(port) [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 561.736636] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] raise exception.PortBindingFailed(port_id=port['id']) [ 561.736930] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] nova.exception.PortBindingFailed: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. [ 561.736930] env[62619]: ERROR nova.compute.manager [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] [ 561.736930] env[62619]: DEBUG nova.compute.utils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 561.739544] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.011s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 561.741159] env[62619]: INFO nova.compute.claims [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 561.746688] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Build of instance d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72 was re-scheduled: Binding failed for port 8623168c-a37b-474f-8a2c-e4312275c1c9, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 561.746688] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 561.746895] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquiring lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 561.746999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Acquired lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.747162] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 561.780517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Releasing lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.781057] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 561.781322] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 561.781781] env[62619]: DEBUG oslo_concurrency.lockutils [None req-266c03c7-fa75-4383-b059-6b7ead578c6a tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "1f94cb8f-7773-4330-ab1e-9ccad7585b07" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 24.679s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 561.782009] env[62619]: DEBUG oslo_concurrency.lockutils [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] Acquired lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 561.782229] env[62619]: DEBUG nova.network.neutron [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Refreshing network info cache for port f9543b24-561b-4d98-b029-b3a6502c955b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 561.783838] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05e753a5-a76d-4241-a92f-0c70c056244b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.798768] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d61789e-4e95-4465-8634-3fb07db914ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 561.824382] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 606514b7-c9c5-43c1-bc71-5337228373f1 could not be found. [ 561.825550] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 561.825948] env[62619]: INFO nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Took 0.04 seconds to destroy the instance on the hypervisor. [ 561.826149] env[62619]: DEBUG oslo.service.loopingcall [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 561.826374] env[62619]: DEBUG nova.compute.manager [-] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 561.826465] env[62619]: DEBUG nova.network.neutron [-] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 561.858387] env[62619]: DEBUG nova.network.neutron [-] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 561.934606] env[62619]: DEBUG oslo_concurrency.lockutils [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] Releasing lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 561.935057] env[62619]: DEBUG nova.compute.manager [req-84e2cbf5-adf8-455e-9871-7f5f579ab632 req-1589cf83-526c-4df3-81b2-ff979dc279e2 service nova] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Received event network-vif-deleted-4f3424b7-9612-47ac-abc8-a9245b1cf289 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 562.183136] env[62619]: INFO nova.compute.manager [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] [instance: 0c828d87-be04-47a4-87f9-a0f54622326b] Took 1.05 seconds to deallocate network for instance. [ 562.192059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "5680b562-8e8d-4fed-8b48-ec7add23ed7b" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.192550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "5680b562-8e8d-4fed-8b48-ec7add23ed7b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 562.291033] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 562.311015] env[62619]: DEBUG nova.network.neutron [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.364418] env[62619]: DEBUG nova.network.neutron [-] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.374475] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 562.406246] env[62619]: DEBUG nova.network.neutron [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.574902] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 562.823295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.866598] env[62619]: INFO nova.compute.manager [-] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Took 1.04 seconds to deallocate network for instance. 
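Note: the recurring "Acquiring lock" / "acquired ... waited Ns" / "released ... held Ns" lines (lockutils.py:402/407/421) come from oslo.concurrency's synchronized decorator wrapping the guarded section, which is why claims, claim aborts and deallocation bookkeeping all serialise on the "compute_resources" lock name, and each build serialises on its instance UUID. A minimal sketch of that pattern (Python, requires oslo.concurrency); the ResourceTrackerSketch class here is a simplified stand-in for illustration, not Nova's resource tracker:

from oslo_concurrency import lockutils


class ResourceTrackerSketch:
    @lockutils.synchronized("compute_resources")
    def instance_claim(self, instance_uuid):
        # Runs with the "compute_resources" semaphore held; the decorator's
        # inner wrapper emits the acquired/released debug lines with the
        # waited/held timings seen in this log.
        print(f"claiming resources for {instance_uuid}")


if __name__ == "__main__":
    ResourceTrackerSketch().instance_claim(
        "98b43e0d-87cd-4be8-87b3-3b94ebc97cf9")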
[ 562.871963] env[62619]: DEBUG nova.compute.claims [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 562.872371] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 562.909948] env[62619]: DEBUG oslo_concurrency.lockutils [req-e3b0061d-899c-4e2c-a20e-36e77855176a req-164052c6-80d6-4b1b-9eb0-20480045eb3d service nova] Releasing lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 562.998711] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcb949e-4803-4cd3-a9a1-7c5e75ece6c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.010026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dee02c7-f7cd-442b-b2d3-6a9821a98b12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.038706] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2d9ba4c-595e-4562-bcb2-56cf116e5f5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.047372] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4de0a502-3003-47f9-943f-76fba426803f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 563.065660] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 563.079183] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Releasing lock "refresh_cache-d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 563.079427] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 563.079590] env[62619]: DEBUG nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 563.079750] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 563.106283] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 563.230660] env[62619]: INFO nova.scheduler.client.report [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Deleted allocations for instance 0c828d87-be04-47a4-87f9-a0f54622326b [ 563.568964] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 563.610030] env[62619]: DEBUG nova.network.neutron [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 563.746169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7718e1fd-3809-42b7-bc2b-82147bd3e3ee tempest-ServersAdminNegativeTestJSON-570434186 tempest-ServersAdminNegativeTestJSON-570434186-project-member] Lock "0c828d87-be04-47a4-87f9-a0f54622326b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.191s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.074626] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 564.075178] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 564.079629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.003s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 564.080482] env[62619]: INFO nova.compute.claims [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 564.116724] env[62619]: INFO nova.compute.manager [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] [instance: d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72] Took 1.04 seconds to deallocate network for instance. [ 564.248646] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 564.586898] env[62619]: DEBUG nova.compute.utils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 564.591633] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 564.591812] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 564.685977] env[62619]: DEBUG nova.policy [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3b7610dde5834843b09ca0532f008b61', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3d32a13eb43346e59d1af795496cb6df', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 564.765452] env[62619]: DEBUG nova.compute.manager [req-9778cb6a-6b16-473b-8ae4-1d2bc15609ac req-f2e1f736-f699-4343-8102-781f11f7efbd service nova] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Received event network-vif-deleted-f9543b24-561b-4d98-b029-b3a6502c955b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 564.782802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.093021] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 565.159496] env[62619]: INFO nova.scheduler.client.report [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Deleted allocations for instance d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72 [ 565.429643] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a130ac49-d014-4549-8065-0ec65524a559 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.437389] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-271f6f7d-1dca-443a-8aec-13ea1df8c61a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.473222] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3711a735-b10b-4000-8525-747c6f75db24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.482416] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a04349-6cc6-4461-827c-bd1983cdbf5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 565.499958] env[62619]: DEBUG nova.compute.provider_tree [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 565.551154] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Successfully created port: 5ef6b61a-bbb5-4339-bc27-c098e4e9366f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 565.645931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 565.646480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 565.676202] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c15ac49c-fa30-4267-9ccb-e3014c4923ee tempest-ServerDiagnosticsNegativeTest-936481918 tempest-ServerDiagnosticsNegativeTest-936481918-project-member] Lock "d9b2c5c3-bd2b-4ec0-8b3c-e0f671d81c72" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
28.099s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.006560] env[62619]: DEBUG nova.scheduler.client.report [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 566.113090] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 566.144129] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 566.144565] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 566.144565] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 566.144722] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 566.144881] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Image pref 0:0:0 
{{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 566.145106] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 566.145311] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 566.145502] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 566.145676] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 566.145864] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 566.146088] env[62619]: DEBUG nova.virt.hardware [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 566.146935] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39ab2210-a7b5-4a82-a3be-97de10057883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.161249] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e25aaec3-c4a4-44ac-9fa6-afe95aa1972b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 566.178045] env[62619]: DEBUG nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 566.517053] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.438s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 566.517918] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 566.522650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.415s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 566.707231] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 567.029231] env[62619]: DEBUG nova.compute.utils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 567.036206] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 567.036206] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 567.131266] env[62619]: DEBUG nova.policy [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2e876d6b3b19488dbb04b70ba14e1bf1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33d5117a90534bc8a7e6fb81c77522f9', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 567.284726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20d4cd5-ad27-43ab-8faa-d82ad73ada88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.295211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f92c9312-26a9-430c-9f74-328239a62cd6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.333610] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00ff7440-0aa1-4070-8835-0fb93ab553e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.341666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-459c7f5c-f2bb-4d95-a46f-4062df4791d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 567.357396] env[62619]: DEBUG nova.compute.provider_tree [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 567.511069] env[62619]: ERROR nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. 
[ 567.511069] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.511069] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.511069] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.511069] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.511069] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.511069] env[62619]: ERROR nova.compute.manager raise self.value [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.511069] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 567.511069] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.511069] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 567.511556] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.511556] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 567.511556] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. 
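The traceback above ends in `_ensure_no_port_binding_failure` raising `PortBindingFailed`: after Neutron updates a port, Nova inspects the returned binding and aborts when Neutron reports the binding as failed. A simplified, self-contained sketch of that check follows; the exception class and constant are local stand-ins so the snippet runs without Nova installed, and the port dict is a hypothetical example shaped like what Neutron returns.

```python
# Simplified sketch of the check that produces the PortBindingFailed
# error above (illustrative, not Nova's verbatim source).
class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")

VIF_TYPE_BINDING_FAILED = "binding_failed"

def ensure_no_port_binding_failure(port):
    # Neutron marks a port it could not bind on the target host with
    # binding:vif_type = "binding_failed"; Nova refuses to spawn on it.
    if port.get("binding:vif_type") == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port["id"])

try:
    ensure_no_port_binding_failure(
        {"id": "5ef6b61a-bbb5-4339-bc27-c098e4e9366f",
         "binding:vif_type": "binding_failed"})
except PortBindingFailed as exc:
    print(exc)
```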
[ 567.511556] env[62619]: ERROR nova.compute.manager [ 567.511556] env[62619]: Traceback (most recent call last): [ 567.511556] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 567.511556] env[62619]: listener.cb(fileno) [ 567.511556] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.511556] env[62619]: result = function(*args, **kwargs) [ 567.511556] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.511556] env[62619]: return func(*args, **kwargs) [ 567.511556] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.511556] env[62619]: raise e [ 567.511556] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.511556] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 567.511556] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.511556] env[62619]: created_port_ids = self._update_ports_for_instance( [ 567.511556] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.511556] env[62619]: with excutils.save_and_reraise_exception(): [ 567.511556] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.511556] env[62619]: self.force_reraise() [ 567.511556] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.511556] env[62619]: raise self.value [ 567.511556] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.511556] env[62619]: updated_port = self._update_port( [ 567.511556] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.511556] env[62619]: _ensure_no_port_binding_failure(port) [ 567.511556] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.511556] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 567.512654] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. [ 567.512654] env[62619]: Removing descriptor: 17 [ 567.512654] env[62619]: ERROR nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. 
[ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Traceback (most recent call last): [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] yield resources [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self.driver.spawn(context, instance, image_meta, [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 567.512654] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] vm_ref = self.build_virtual_machine(instance, [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] vif_infos = vmwarevif.get_vif_info(self._session, [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] for vif in network_info: [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return self._sync_wrapper(fn, *args, **kwargs) [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self.wait() [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self[:] = self._gt.wait() [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return self._exit_event.wait() [ 567.513091] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 567.513487] env[62619]: ERROR 
nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] result = hub.switch() [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return self.greenlet.switch() [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] result = function(*args, **kwargs) [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return func(*args, **kwargs) [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] raise e [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] nwinfo = self.network_api.allocate_for_instance( [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 567.513487] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] created_port_ids = self._update_ports_for_instance( [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] with excutils.save_and_reraise_exception(): [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self.force_reraise() [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] raise self.value [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] updated_port = self._update_port( [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 567.514300] 
env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] _ensure_no_port_binding_failure(port) [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 567.514300] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] raise exception.PortBindingFailed(port_id=port['id']) [ 567.514714] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. [ 567.514714] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] [ 567.514714] env[62619]: INFO nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Terminating instance [ 567.516029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquiring lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 567.516029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquired lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 567.516179] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 567.536925] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 567.711030] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Successfully created port: 66364b7e-51af-4008-8912-08003a4c1ca4 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 567.861323] env[62619]: DEBUG nova.scheduler.client.report [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 568.117146] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.191798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "7f17ce32-e674-4275-a313-a3f69dde2ee9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 568.191798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "7f17ce32-e674-4275-a313-a3f69dde2ee9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.333959] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 568.369020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.846s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 568.370484] env[62619]: ERROR nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 
tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Traceback (most recent call last): [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self.driver.spawn(context, instance, image_meta, [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] vm_ref = self.build_virtual_machine(instance, [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] vif_infos = vmwarevif.get_vif_info(self._session, [ 568.370484] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] for vif in network_info: [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return self._sync_wrapper(fn, *args, **kwargs) [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self.wait() [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self[:] = self._gt.wait() [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return self._exit_event.wait() [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 
124, in wait [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] result = hub.switch() [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 568.370897] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return self.greenlet.switch() [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] result = function(*args, **kwargs) [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] return func(*args, **kwargs) [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] raise e [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] nwinfo = self.network_api.allocate_for_instance( [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] created_port_ids = self._update_ports_for_instance( [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] with excutils.save_and_reraise_exception(): [ 568.371278] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] self.force_reraise() [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] raise self.value [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] updated_port = self._update_port( [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File 
"/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] _ensure_no_port_binding_failure(port) [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] raise exception.PortBindingFailed(port_id=port['id']) [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] nova.exception.PortBindingFailed: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. [ 568.371653] env[62619]: ERROR nova.compute.manager [instance: 88346467-e1ac-4647-bcf7-063636853a2b] [ 568.371987] env[62619]: DEBUG nova.compute.utils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 568.371987] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.402s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 568.374384] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Build of instance 88346467-e1ac-4647-bcf7-063636853a2b was re-scheduled: Binding failed for port 49dfc77a-a416-4d6e-814b-bc9408bcdde0, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 568.374797] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 568.374999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquiring lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.375147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Acquired lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.375291] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 568.554417] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 568.580785] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 568.581253] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 568.581253] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 568.581386] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 568.581502] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 568.581835] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 568.582082] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 568.582666] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 568.582872] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 568.583055] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 568.583267] env[62619]: DEBUG nova.virt.hardware [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 568.584139] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a147d813-7494-4829-8290-941f4ec330dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.592517] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2f5772d-c896-48fe-ba78-35bfb2b8addd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.844107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Releasing lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 568.844878] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 568.845159] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 568.845528] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fe084e80-0bbc-4286-8ebb-dca9ca6490d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.857493] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d70500bb-d47c-4530-8c8d-02b71846ccc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 568.888287] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9 could not be found. [ 568.888287] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 568.888287] env[62619]: INFO nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 568.888715] env[62619]: DEBUG oslo.service.loopingcall [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 568.889463] env[62619]: DEBUG nova.compute.manager [-] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 568.889557] env[62619]: DEBUG nova.network.neutron [-] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 568.936497] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.944244] env[62619]: DEBUG nova.network.neutron [-] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 568.996736] env[62619]: DEBUG nova.compute.manager [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Received event network-changed-5ef6b61a-bbb5-4339-bc27-c098e4e9366f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 568.996869] env[62619]: DEBUG nova.compute.manager [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Refreshing instance network info cache due to event network-changed-5ef6b61a-bbb5-4339-bc27-c098e4e9366f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 568.997078] env[62619]: DEBUG oslo_concurrency.lockutils [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] Acquiring lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 568.997209] env[62619]: DEBUG oslo_concurrency.lockutils [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] Acquired lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 568.997475] env[62619]: DEBUG nova.network.neutron [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Refreshing network info cache for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 569.145093] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.207571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2832c1-0bc2-4ad5-acc2-8c1b19de183c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.217701] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2503e083-db73-4b2f-87ee-c91e81772b29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.251661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de7876e3-726e-473d-97b2-4fef11a3b75f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.259558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f30b53e4-83e3-4a2f-96b8-22a7cc740dfc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 569.273223] env[62619]: DEBUG nova.compute.provider_tree [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] 
Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 569.448774] env[62619]: DEBUG nova.network.neutron [-] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.529725] env[62619]: DEBUG nova.network.neutron [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.651016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Releasing lock "refresh_cache-88346467-e1ac-4647-bcf7-063636853a2b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 569.651016] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 569.651016] env[62619]: DEBUG nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 569.651016] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 569.684728] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 569.715499] env[62619]: DEBUG nova.network.neutron [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 569.776158] env[62619]: DEBUG nova.scheduler.client.report [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 569.789783] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquiring lock "b79f2461-2b0f-4427-abb8-7a3a192e6230" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 569.790414] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Lock "b79f2461-2b0f-4427-abb8-7a3a192e6230" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 569.801571] env[62619]: ERROR nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. 
[ 569.801571] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.801571] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.801571] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.801571] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.801571] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.801571] env[62619]: ERROR nova.compute.manager raise self.value [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.801571] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 569.801571] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.801571] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 569.803316] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.803316] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 569.803316] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. 
[ 569.803316] env[62619]: ERROR nova.compute.manager [ 569.803316] env[62619]: Traceback (most recent call last): [ 569.803316] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 569.803316] env[62619]: listener.cb(fileno) [ 569.803316] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.803316] env[62619]: result = function(*args, **kwargs) [ 569.803316] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 569.803316] env[62619]: return func(*args, **kwargs) [ 569.803316] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.803316] env[62619]: raise e [ 569.803316] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.803316] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 569.803316] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.803316] env[62619]: created_port_ids = self._update_ports_for_instance( [ 569.803316] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.803316] env[62619]: with excutils.save_and_reraise_exception(): [ 569.803316] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.803316] env[62619]: self.force_reraise() [ 569.803316] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.803316] env[62619]: raise self.value [ 569.803316] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.803316] env[62619]: updated_port = self._update_port( [ 569.803316] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.803316] env[62619]: _ensure_no_port_binding_failure(port) [ 569.803316] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.803316] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 569.804077] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. [ 569.804077] env[62619]: Removing descriptor: 18 [ 569.804077] env[62619]: ERROR nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. 
[ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Traceback (most recent call last): [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] yield resources [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self.driver.spawn(context, instance, image_meta, [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 569.804077] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] vm_ref = self.build_virtual_machine(instance, [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] vif_infos = vmwarevif.get_vif_info(self._session, [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] for vif in network_info: [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return self._sync_wrapper(fn, *args, **kwargs) [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self.wait() [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self[:] = self._gt.wait() [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return self._exit_event.wait() [ 569.804855] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 569.805514] env[62619]: ERROR 
nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] result = hub.switch() [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return self.greenlet.switch() [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] result = function(*args, **kwargs) [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return func(*args, **kwargs) [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] raise e [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] nwinfo = self.network_api.allocate_for_instance( [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 569.805514] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] created_port_ids = self._update_ports_for_instance( [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] with excutils.save_and_reraise_exception(): [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self.force_reraise() [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] raise self.value [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] updated_port = self._update_port( [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 569.806061] 
env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] _ensure_no_port_binding_failure(port) [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 569.806061] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] raise exception.PortBindingFailed(port_id=port['id']) [ 569.807558] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. [ 569.807558] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] [ 569.807558] env[62619]: INFO nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Terminating instance [ 569.807558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquiring lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 569.807558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquired lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 569.807558] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 569.950208] env[62619]: INFO nova.compute.manager [-] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Took 1.06 seconds to deallocate network for instance. 
[ 569.955911] env[62619]: DEBUG nova.compute.claims [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 569.956162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 570.043736] env[62619]: DEBUG nova.compute.manager [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Received event network-changed-66364b7e-51af-4008-8912-08003a4c1ca4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 570.043736] env[62619]: DEBUG nova.compute.manager [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Refreshing instance network info cache due to event network-changed-66364b7e-51af-4008-8912-08003a4c1ca4. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 570.043736] env[62619]: DEBUG oslo_concurrency.lockutils [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] Acquiring lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.189845] env[62619]: DEBUG nova.network.neutron [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.220502] env[62619]: DEBUG oslo_concurrency.lockutils [req-17b6e1e8-b716-40bd-8d34-6fd84db5dcb7 req-40c6f65e-9478-4ee8-a365-7ab80a5148fb service nova] Releasing lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 570.287244] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.913s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 570.287244] env[62619]: ERROR nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. 
[ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Traceback (most recent call last): [ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self.driver.spawn(context, instance, image_meta, [ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 570.287244] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] vm_ref = self.build_virtual_machine(instance, [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] vif_infos = vmwarevif.get_vif_info(self._session, [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] for vif in network_info: [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return self._sync_wrapper(fn, *args, **kwargs) [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self.wait() [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self[:] = self._gt.wait() [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return self._exit_event.wait() [ 570.287535] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] result = hub.switch() [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return self.greenlet.switch() [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] result = function(*args, **kwargs) [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] return func(*args, **kwargs) [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] raise e [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] nwinfo = self.network_api.allocate_for_instance( [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 570.287878] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] created_port_ids = self._update_ports_for_instance( [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] with excutils.save_and_reraise_exception(): [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] self.force_reraise() [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] raise self.value [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] updated_port = self._update_port( [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] _ensure_no_port_binding_failure(port) [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 570.288239] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] raise exception.PortBindingFailed(port_id=port['id']) [ 570.288543] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] nova.exception.PortBindingFailed: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. [ 570.288543] env[62619]: ERROR nova.compute.manager [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] [ 570.288543] env[62619]: DEBUG nova.compute.utils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 570.288543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 12.280s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 570.291110] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Build of instance 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4 was re-scheduled: Binding failed for port 289bd473-7ed6-482d-9825-dc74f2cb5379, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 570.291110] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 570.291392] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 570.291537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquired lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 570.291693] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 570.335147] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 
tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 570.556952] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 570.696025] env[62619]: INFO nova.compute.manager [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] [instance: 88346467-e1ac-4647-bcf7-063636853a2b] Took 1.04 seconds to deallocate network for instance. [ 570.828772] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.039760] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.058352] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Releasing lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.058863] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 571.059101] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 571.059411] env[62619]: DEBUG oslo_concurrency.lockutils [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] Acquired lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 571.059581] env[62619]: DEBUG nova.network.neutron [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Refreshing network info cache for port 66364b7e-51af-4008-8912-08003a4c1ca4 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 571.060620] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-58390b4f-7939-4128-b626-4aacd76dd350 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.072951] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8001986-804c-498d-95d7-cb405a6e26af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 571.097498] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 72997052-6221-4a7b-abb7-07a7ce87bf3c could not be found. [ 571.097781] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 571.098028] env[62619]: INFO nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Took 0.04 seconds to destroy the instance on the hypervisor. [ 571.098302] env[62619]: DEBUG oslo.service.loopingcall [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 571.098546] env[62619]: DEBUG nova.compute.manager [-] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.098669] env[62619]: DEBUG nova.network.neutron [-] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 571.128082] env[62619]: DEBUG nova.network.neutron [-] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.546017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Releasing lock "refresh_cache-5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 571.546017] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 571.546017] env[62619]: DEBUG nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 571.546017] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 571.575088] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.596083] env[62619]: DEBUG nova.network.neutron [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 571.630833] env[62619]: DEBUG nova.network.neutron [-] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.705779] env[62619]: DEBUG nova.compute.manager [req-ce9da1a1-a1e6-4fb5-acce-672fe8d5a742 req-630d52c7-c12d-411d-99c5-5acc4c492416 service nova] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Received event network-vif-deleted-5ef6b61a-bbb5-4339-bc27-c098e4e9366f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 571.743104] env[62619]: DEBUG nova.network.neutron [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 571.747702] env[62619]: INFO nova.scheduler.client.report [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Deleted allocations for instance 88346467-e1ac-4647-bcf7-063636853a2b [ 571.841427] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 88346467-e1ac-4647-bcf7-063636853a2b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 572.079614] env[62619]: DEBUG nova.network.neutron [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 572.133935] env[62619]: INFO nova.compute.manager [-] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Took 1.04 seconds to deallocate network for instance. 
[ 572.136400] env[62619]: DEBUG nova.compute.claims [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 572.136537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 572.245656] env[62619]: DEBUG oslo_concurrency.lockutils [req-25570fd0-8c94-49c3-bde8-97d6a34fdf60 req-9daf9924-5894-4d8c-a239-0cd4d7a5acea service nova] Releasing lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 572.259531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4281670-6868-4891-b2f5-c82bd392f4e1 tempest-ImagesOneServerNegativeTestJSON-191794322 tempest-ImagesOneServerNegativeTestJSON-191794322-project-member] Lock "88346467-e1ac-4647-bcf7-063636853a2b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.553s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 572.348277] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 572.348556] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 67cf14dd-309a-4abb-b8e0-bc289c7cc845 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 572.348556] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 606514b7-c9c5-43c1-bc71-5337228373f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 572.348674] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 572.348787] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 72997052-6221-4a7b-abb7-07a7ce87bf3c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 572.584861] env[62619]: INFO nova.compute.manager [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4] Took 1.04 seconds to deallocate network for instance. [ 572.685425] env[62619]: DEBUG nova.compute.manager [req-9d78e3f1-770f-4b02-84cd-da50cbe2941d req-dc293886-9858-4484-9309-818d5e16ac0b service nova] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Received event network-vif-deleted-66364b7e-51af-4008-8912-08003a4c1ca4 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 572.761688] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 572.851819] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 748a4c26-6df7-4a4c-b81a-0b3e59a8b936 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 573.301007] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 573.355700] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 5e419be8-cfb9-4819-8e92-873daa313d7a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 573.663616] env[62619]: INFO nova.scheduler.client.report [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Deleted allocations for instance 5094a573-d6b5-4e38-ba60-f7c9bd6cdae4 [ 573.863019] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 89638fb3-c5f4-48a4-948b-fb6220ed1dca has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 574.031735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "d7404720-7cf0-41bf-a882-2cb6db2253bc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 574.031735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "d7404720-7cf0-41bf-a882-2cb6db2253bc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 574.188596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-93abef63-903c-4afb-a46d-b79278abe070 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "5094a573-d6b5-4e38-ba60-f7c9bd6cdae4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.745s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 574.366539] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 79a94ed1-1139-4194-8091-00b7b1562330 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 574.694328] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 574.873086] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance d22ac031-3202-457c-8bb7-d557ad0fe9f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 575.226477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 575.375998] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 5680b562-8e8d-4fed-8b48-ec7add23ed7b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 575.883160] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance b7c425a1-a80d-4a62-a71f-d14fdf638cf7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 576.389797] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 7f17ce32-e674-4275-a313-a3f69dde2ee9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 576.894641] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance b79f2461-2b0f-4427-abb8-7a3a192e6230 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 576.894969] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 576.895299] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 577.150085] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e98ddb-47a9-45d2-9ec8-eee900728239 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.160856] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b659b78a-6ae2-4c5b-9e9b-a4f22edd7fed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.197935] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62b6b33a-b619-4378-927a-b77b6ec86d51 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.207019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d980830-28c7-44aa-a3e2-07185c787871 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 577.221155] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 577.538519] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquiring lock "ee06b107-4352-4491-b9bb-7faa7ccb5571" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 577.539410] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Lock "ee06b107-4352-4491-b9bb-7faa7ccb5571" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 577.723835] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 578.230929] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 578.231433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 7.943s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 578.231546] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.517s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 578.237124] env[62619]: INFO nova.compute.claims [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 579.495810] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1efaf4d2-9751-43b3-b5a8-bddd45d83031 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.503546] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a99c1b-cf42-46d8-85e0-e8c22c78c408 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.538665] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-846c6207-0927-44df-8a89-e495fe278e23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.547402] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc4fde5e-2263-4ef0-a934-fae4fafe0158 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 579.562551] env[62619]: DEBUG nova.compute.provider_tree [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 580.069705] env[62619]: DEBUG nova.scheduler.client.report [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 580.577408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 580.577408] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 580.580056] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.359s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.085344] env[62619]: DEBUG nova.compute.utils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 581.089301] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 581.089301] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 581.269645] env[62619]: DEBUG nova.policy [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '62a6bce87a654938a92bcd264590beb1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '602cd64dc8e24cb4994d90f4d245df8d', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 581.452885] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c838492f-8899-44df-bf0d-dcb551275f53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.464724] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a34095d-3218-438d-985b-7056534c467b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.509120] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a6d4d0-3655-4364-b6a4-06b3cb6d4b95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.517500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca80ec45-7e28-429d-9160-96bc3c7cb1a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 581.533499] env[62619]: DEBUG nova.compute.provider_tree [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 581.540264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquiring lock "972e1187-09ee-4703-a3bc-7eb213a5c52e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 581.540361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "972e1187-09ee-4703-a3bc-7eb213a5c52e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 581.591077] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 582.038198] env[62619]: DEBUG nova.scheduler.client.report [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 582.201179] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Successfully created port: 7964180c-d61d-4aa2-a968-9e61eec0a3c5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 582.542561] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.962s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 582.542926] env[62619]: ERROR nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. 
[ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Traceback (most recent call last): [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self.driver.spawn(context, instance, image_meta, [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self._vmops.spawn(context, instance, image_meta, injected_files, [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] vm_ref = self.build_virtual_machine(instance, [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] vif_infos = vmwarevif.get_vif_info(self._session, [ 582.542926] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] for vif in network_info: [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return self._sync_wrapper(fn, *args, **kwargs) [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self.wait() [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self[:] = self._gt.wait() [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return self._exit_event.wait() [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] result = hub.switch() [ 582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
582.543330] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return self.greenlet.switch() [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] result = function(*args, **kwargs) [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] return func(*args, **kwargs) [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] raise e [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] nwinfo = self.network_api.allocate_for_instance( [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] created_port_ids = self._update_ports_for_instance( [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] with excutils.save_and_reraise_exception(): [ 582.543838] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] self.force_reraise() [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] raise self.value [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] updated_port = self._update_port( [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] _ensure_no_port_binding_failure(port) [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] raise exception.PortBindingFailed(port_id=port['id']) [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] nova.exception.PortBindingFailed: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. [ 582.544260] env[62619]: ERROR nova.compute.manager [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] [ 582.544578] env[62619]: DEBUG nova.compute.utils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 582.552358] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 19.723s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 582.552358] env[62619]: INFO nova.compute.claims [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 582.556810] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Build of instance 67cf14dd-309a-4abb-b8e0-bc289c7cc845 was re-scheduled: Binding failed for port 4f3424b7-9612-47ac-abc8-a9245b1cf289, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 582.556810] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 582.556810] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquiring lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 582.556810] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Acquired lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 582.556810] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 582.601927] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 582.634460] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 582.634714] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 582.634990] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 582.635129] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 582.635283] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 582.635427] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 582.636419] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 582.636672] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 582.636988] 
env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 582.637194] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 582.637383] env[62619]: DEBUG nova.virt.hardware [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 582.638592] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c173d4dc-0694-4a98-b88f-218a8df2960e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 582.649881] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a210491-9e9e-4348-9429-391ea9983709 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.216818] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 583.420547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquiring lock "4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 583.420667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Lock "4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 583.455947] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 583.878963] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b6bc80b-73ff-46a1-bc5e-191e039915aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.888038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a88b56-91cf-4e01-8923-7401b0258eff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.937066] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fcd99ce-5219-4739-8c69-6471d2280f27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.948391] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4fdbfdac-3617-4c41-87ad-5c901c54c114 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 583.963452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Releasing lock "refresh_cache-67cf14dd-309a-4abb-b8e0-bc289c7cc845" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 583.964908] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 583.965214] env[62619]: DEBUG nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 583.965399] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 583.967569] env[62619]: DEBUG nova.compute.provider_tree [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 584.039247] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 584.470976] env[62619]: DEBUG nova.scheduler.client.report [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 584.546228] env[62619]: DEBUG nova.network.neutron [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 584.646236] env[62619]: ERROR nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. 
[ 584.646236] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.646236] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.646236] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.646236] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.646236] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.646236] env[62619]: ERROR nova.compute.manager raise self.value [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.646236] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 584.646236] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.646236] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 584.646713] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.646713] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 584.646713] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. 
[ 584.646713] env[62619]: ERROR nova.compute.manager [ 584.646713] env[62619]: Traceback (most recent call last): [ 584.646713] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 584.646713] env[62619]: listener.cb(fileno) [ 584.646713] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.646713] env[62619]: result = function(*args, **kwargs) [ 584.646713] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.646713] env[62619]: return func(*args, **kwargs) [ 584.646713] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.646713] env[62619]: raise e [ 584.646713] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.646713] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 584.646713] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.646713] env[62619]: created_port_ids = self._update_ports_for_instance( [ 584.646713] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.646713] env[62619]: with excutils.save_and_reraise_exception(): [ 584.646713] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.646713] env[62619]: self.force_reraise() [ 584.646713] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.646713] env[62619]: raise self.value [ 584.646713] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.646713] env[62619]: updated_port = self._update_port( [ 584.646713] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.646713] env[62619]: _ensure_no_port_binding_failure(port) [ 584.646713] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.646713] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 584.647552] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. [ 584.647552] env[62619]: Removing descriptor: 18 [ 584.647552] env[62619]: ERROR nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. 
[ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Traceback (most recent call last): [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] yield resources [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self.driver.spawn(context, instance, image_meta, [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self._vmops.spawn(context, instance, image_meta, injected_files, [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 584.647552] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] vm_ref = self.build_virtual_machine(instance, [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] vif_infos = vmwarevif.get_vif_info(self._session, [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] for vif in network_info: [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return self._sync_wrapper(fn, *args, **kwargs) [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self.wait() [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self[:] = self._gt.wait() [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return self._exit_event.wait() [ 584.647935] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 584.648302] env[62619]: ERROR 
nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] result = hub.switch() [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return self.greenlet.switch() [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] result = function(*args, **kwargs) [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return func(*args, **kwargs) [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] raise e [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] nwinfo = self.network_api.allocate_for_instance( [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 584.648302] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] created_port_ids = self._update_ports_for_instance( [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] with excutils.save_and_reraise_exception(): [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self.force_reraise() [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] raise self.value [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] updated_port = self._update_port( [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 584.648651] 
env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] _ensure_no_port_binding_failure(port) [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 584.648651] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] raise exception.PortBindingFailed(port_id=port['id']) [ 584.648999] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. [ 584.648999] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] [ 584.648999] env[62619]: INFO nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Terminating instance [ 584.650721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquiring lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.650930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquired lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 584.651148] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 584.765063] env[62619]: DEBUG nova.compute.manager [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Received event network-changed-7964180c-d61d-4aa2-a968-9e61eec0a3c5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 584.765330] env[62619]: DEBUG nova.compute.manager [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Refreshing instance network info cache due to event network-changed-7964180c-d61d-4aa2-a968-9e61eec0a3c5. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 584.765927] env[62619]: DEBUG oslo_concurrency.lockutils [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] Acquiring lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 584.837417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 584.838017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 584.978911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.430s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 584.978911] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 584.979957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.108s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.046957] env[62619]: INFO nova.compute.manager [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] [instance: 67cf14dd-309a-4abb-b8e0-bc289c7cc845] Took 1.08 seconds to deallocate network for instance. [ 585.169311] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.227080] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 585.486066] env[62619]: DEBUG nova.compute.utils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 585.492556] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 585.495591] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 585.560733] env[62619]: DEBUG nova.policy [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '866a4ed5b24043cfa965eb0d4191e438', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '09c13dc98d6d4f32b071164570910416', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 585.730125] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Releasing lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 585.733986] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 585.733986] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 585.733986] env[62619]: DEBUG oslo_concurrency.lockutils [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] Acquired lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 585.733986] env[62619]: DEBUG nova.network.neutron [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Refreshing network info cache for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 585.733986] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aa2f2667-c2f9-42f1-bb8b-50d30bef1064 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.754593] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49e6ffc6-73d8-4f4a-a3cb-b6f7db0f6122 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.782419] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 748a4c26-6df7-4a4c-b81a-0b3e59a8b936 could not be found. [ 585.782673] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 585.782849] env[62619]: INFO nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Took 0.05 seconds to destroy the instance on the hypervisor. [ 585.783102] env[62619]: DEBUG oslo.service.loopingcall [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 585.783307] env[62619]: DEBUG nova.compute.manager [-] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 585.783396] env[62619]: DEBUG nova.network.neutron [-] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 585.802260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "6f32ec6e-af29-4c0e-8f8b-708cbd1af474" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 585.802411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "6f32ec6e-af29-4c0e-8f8b-708cbd1af474" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 585.809078] env[62619]: DEBUG nova.network.neutron [-] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 585.847832] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-422b12ba-dd94-47ec-a17a-38a08b847148 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.859286] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e88cddc-5527-434d-a39f-62b92e1bc101 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.902192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7e10615-f920-4230-9856-8336d43d57d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.911110] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8afadbb1-48d3-41ae-9053-b26766e6e7b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 585.929056] env[62619]: DEBUG nova.compute.provider_tree [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 585.993133] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 586.004013] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Successfully created port: 9b83d46d-092a-484d-876f-43532f8afecf {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 586.091800] env[62619]: INFO nova.scheduler.client.report [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Deleted allocations for instance 67cf14dd-309a-4abb-b8e0-bc289c7cc845 [ 586.280742] env[62619]: DEBUG nova.network.neutron [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 586.296109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "e145f3d4-fb6e-4c14-abdf-d85596fb0218" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.296347] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "e145f3d4-fb6e-4c14-abdf-d85596fb0218" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.309993] env[62619]: DEBUG nova.network.neutron [-] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.418466] env[62619]: DEBUG nova.network.neutron [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 586.436156] env[62619]: DEBUG nova.scheduler.client.report [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 586.600821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b074cb1-f1c2-46f6-ab73-198905ffb3e3 tempest-ServersTestJSON-1717828494 tempest-ServersTestJSON-1717828494-project-member] Lock 
"67cf14dd-309a-4abb-b8e0-bc289c7cc845" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.024s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.812401] env[62619]: INFO nova.compute.manager [-] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Took 1.03 seconds to deallocate network for instance. [ 586.818623] env[62619]: DEBUG nova.compute.claims [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 586.818795] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 586.922770] env[62619]: DEBUG oslo_concurrency.lockutils [req-c2430c41-5ee4-4e83-b40d-4600b189052b req-94faa0f2-0a78-4fb8-aa5c-8f2a501bc9ce service nova] Releasing lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 586.943704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.964s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 586.944374] env[62619]: ERROR nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. 
[ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Traceback (most recent call last): [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self.driver.spawn(context, instance, image_meta, [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] vm_ref = self.build_virtual_machine(instance, [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 586.944374] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] for vif in network_info: [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return self._sync_wrapper(fn, *args, **kwargs) [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self.wait() [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self[:] = self._gt.wait() [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return self._exit_event.wait() [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] result = hub.switch() [ 586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
586.944737] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return self.greenlet.switch() [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] result = function(*args, **kwargs) [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] return func(*args, **kwargs) [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] raise e [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] nwinfo = self.network_api.allocate_for_instance( [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] created_port_ids = self._update_ports_for_instance( [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] with excutils.save_and_reraise_exception(): [ 586.945094] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] self.force_reraise() [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] raise self.value [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] updated_port = self._update_port( [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] _ensure_no_port_binding_failure(port) [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] raise exception.PortBindingFailed(port_id=port['id']) [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] nova.exception.PortBindingFailed: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. [ 586.945444] env[62619]: ERROR nova.compute.manager [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] [ 586.945740] env[62619]: DEBUG nova.compute.utils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 586.946708] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.165s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 586.948300] env[62619]: INFO nova.compute.claims [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 586.951356] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Build of instance 606514b7-c9c5-43c1-bc71-5337228373f1 was re-scheduled: Binding failed for port f9543b24-561b-4d98-b029-b3a6502c955b, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 586.953025] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 586.953295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquiring lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 586.953764] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Acquired lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 586.954021] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 587.007658] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 587.031908] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:59:36Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='452297526',id=22,is_public=True,memory_mb=192,name='tempest-flavor_with_ephemeral_0-778476081',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 587.032123] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 587.032288] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 587.032471] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 587.032603] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 587.032741] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 587.032957] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 587.034430] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 
tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 587.034626] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 587.034795] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 587.034968] env[62619]: DEBUG nova.virt.hardware [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 587.039272] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa88ace-763c-403d-b66c-4dd008aaa682 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.049795] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c05bbaf-3a24-48dc-b90d-b788d912b059 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 587.104573] env[62619]: DEBUG nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 587.487115] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 587.593738] env[62619]: DEBUG nova.compute.manager [req-4d8ff943-34a7-4eac-aa9d-92e01ad17de5 req-82eab9d5-e3f3-43eb-afe7-f89543156de6 service nova] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Received event network-vif-deleted-7964180c-d61d-4aa2-a968-9e61eec0a3c5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 587.612120] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 587.639880] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.735479] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 587.735778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 588.116754] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Releasing lock "refresh_cache-606514b7-c9c5-43c1-bc71-5337228373f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 588.117023] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 588.117256] env[62619]: DEBUG nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 588.117371] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 588.166296] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 588.297725] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a9db670-5a4a-414a-b2ff-e152d5319428 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.308797] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5fa2736-3a85-4655-9998-dcadebb01050 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.341550] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-466dae05-e4eb-4d98-bbc5-25e96352023e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.349240] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe15711-5ccc-49f6-9ca0-34a61689b213 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 588.362575] env[62619]: DEBUG nova.compute.provider_tree [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 588.441964] env[62619]: ERROR nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. 
[ 588.441964] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.441964] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.441964] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.441964] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.441964] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.441964] env[62619]: ERROR nova.compute.manager raise self.value [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.441964] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 588.441964] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.441964] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 588.445339] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.445339] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 588.445339] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. 
[ 588.445339] env[62619]: ERROR nova.compute.manager [ 588.445339] env[62619]: Traceback (most recent call last): [ 588.445339] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 588.445339] env[62619]: listener.cb(fileno) [ 588.445339] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.445339] env[62619]: result = function(*args, **kwargs) [ 588.445339] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.445339] env[62619]: return func(*args, **kwargs) [ 588.445339] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.445339] env[62619]: raise e [ 588.445339] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.445339] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 588.445339] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.445339] env[62619]: created_port_ids = self._update_ports_for_instance( [ 588.445339] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.445339] env[62619]: with excutils.save_and_reraise_exception(): [ 588.445339] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.445339] env[62619]: self.force_reraise() [ 588.445339] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.445339] env[62619]: raise self.value [ 588.445339] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.445339] env[62619]: updated_port = self._update_port( [ 588.445339] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.445339] env[62619]: _ensure_no_port_binding_failure(port) [ 588.445339] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.445339] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 588.446130] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. [ 588.446130] env[62619]: Removing descriptor: 18 [ 588.446130] env[62619]: ERROR nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. 
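The PortBindingFailed tracebacks above and below all terminate in nova/network/neutron.py's _ensure_no_port_binding_failure(), which raises once Neutron reports that the port could not be bound on this compute host. The fragment below is a minimal, self-contained sketch of that check; the exception class and the failure condition on the port's binding:vif_type field are assumptions inferred from the traceback and from Neutron's port-binding extension, not code copied from this deployment.

    # Sketch of the binding-failure check seen in the tracebacks.
    # Assumption: Neutron reports a failed binding as
    # port['binding:vif_type'] == 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    def ensure_no_port_binding_failure(port):
        # Raise if Neutron marked the port binding as failed.
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])

    # Example: the port from the error record above.
    try:
        ensure_no_port_binding_failure({
            'id': '9b83d46d-092a-484d-876f-43532f8afecf',
            'binding:vif_type': 'binding_failed',
        })
    except PortBindingFailed as exc:
        print(exc)
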
[ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Traceback (most recent call last): [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] yield resources [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self.driver.spawn(context, instance, image_meta, [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 588.446130] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] vm_ref = self.build_virtual_machine(instance, [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] vif_infos = vmwarevif.get_vif_info(self._session, [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] for vif in network_info: [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return self._sync_wrapper(fn, *args, **kwargs) [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self.wait() [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self[:] = self._gt.wait() [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return self._exit_event.wait() [ 588.446510] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 588.446962] env[62619]: ERROR 
nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] result = hub.switch() [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return self.greenlet.switch() [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] result = function(*args, **kwargs) [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return func(*args, **kwargs) [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] raise e [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] nwinfo = self.network_api.allocate_for_instance( [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 588.446962] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] created_port_ids = self._update_ports_for_instance( [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] with excutils.save_and_reraise_exception(): [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self.force_reraise() [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] raise self.value [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] updated_port = self._update_port( [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 588.448162] 
env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] _ensure_no_port_binding_failure(port) [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 588.448162] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] raise exception.PortBindingFailed(port_id=port['id']) [ 588.448758] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. [ 588.448758] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] [ 588.448758] env[62619]: INFO nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Terminating instance [ 588.448758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquiring lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 588.448758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquired lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 588.448758] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 588.674339] env[62619]: DEBUG nova.network.neutron [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 588.866133] env[62619]: DEBUG nova.scheduler.client.report [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 588.976664] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d 
tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.175805] env[62619]: INFO nova.compute.manager [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] [instance: 606514b7-c9c5-43c1-bc71-5337228373f1] Took 1.06 seconds to deallocate network for instance. [ 589.178687] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 589.373267] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.426s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 589.374068] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 589.379148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.672s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.382698] env[62619]: INFO nova.compute.claims [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 589.565656] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 589.566010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 589.688670] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Releasing lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 589.689592] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 589.689592] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 589.690037] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-35bf122a-6448-44a0-b3b9-5cbf5585dd2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.701731] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec03fc93-782d-4ab2-a2a5-b615c1128232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 589.731875] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e419be8-cfb9-4819-8e92-873daa313d7a could not be found. [ 589.732250] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 589.732420] env[62619]: INFO nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Took 0.04 seconds to destroy the instance on the hypervisor. [ 589.732902] env[62619]: DEBUG oslo.service.loopingcall [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 589.733226] env[62619]: DEBUG nova.compute.manager [-] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 589.733354] env[62619]: DEBUG nova.network.neutron [-] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 589.817082] env[62619]: DEBUG nova.network.neutron [-] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 589.890789] env[62619]: DEBUG nova.compute.utils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 589.892341] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 589.892489] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 590.025079] env[62619]: DEBUG nova.policy [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3dcdddecbd124aaa9646a6ad3b1809f3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c39e2e9aa96248ea9e0dfa5b33b1fe49', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 590.319196] env[62619]: DEBUG nova.network.neutron [-] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 590.343782] env[62619]: INFO nova.scheduler.client.report [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Deleted allocations for instance 606514b7-c9c5-43c1-bc71-5337228373f1 [ 590.399018] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 590.451412] env[62619]: DEBUG nova.compute.manager [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Received event network-changed-9b83d46d-092a-484d-876f-43532f8afecf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 590.451412] env[62619]: DEBUG nova.compute.manager [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Refreshing instance network info cache due to event network-changed-9b83d46d-092a-484d-876f-43532f8afecf. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 590.451549] env[62619]: DEBUG oslo_concurrency.lockutils [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] Acquiring lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 590.452848] env[62619]: DEBUG oslo_concurrency.lockutils [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] Acquired lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 590.453616] env[62619]: DEBUG nova.network.neutron [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Refreshing network info cache for port 9b83d46d-092a-484d-876f-43532f8afecf {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 590.791387] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9fb5e79-9c07-462f-a273-d0ab2cce89c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.807256] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50ac2ac8-216f-430c-8e3e-8a894ac975a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.838246] env[62619]: INFO nova.compute.manager [-] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Took 1.10 seconds to deallocate network for instance. 
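The oslo_concurrency.lockutils entries in this stretch of the log ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", '"released" ... :: held 2.426s') are produced by the library's lock helpers wrapping the decorated callable. A minimal sketch of that usage, borrowing the "compute_resources" lock name from the log and using a hypothetical function body; this is the general pattern, not Nova's implementation:

from oslo_concurrency import lockutils

@lockutils.synchronized("compute_resources")
def instance_claim():
    # While this body runs, any other thread or greenthread entering a
    # callable synchronized on the same lock name blocks; the log's
    # "waited N.NNNs" / "held N.NNNs" figures time exactly that
    # contention.  The body here is a placeholder.
    pass

instance_claim()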
[ 590.840765] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a691ec43-302c-4905-a7cc-07b6c2381e25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.846183] env[62619]: DEBUG nova.compute.claims [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 590.846183] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 590.850079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49021c24-1c7f-475a-becf-0a9cd188a534 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 590.863513] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a0daff05-7f48-4e3f-9366-edbb54b17ebd tempest-ServersTestManualDisk-1177164087 tempest-ServersTestManualDisk-1177164087-project-member] Lock "606514b7-c9c5-43c1-bc71-5337228373f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 46.152s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 590.864637] env[62619]: DEBUG nova.compute.provider_tree [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 590.994081] env[62619]: DEBUG nova.network.neutron [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 591.109452] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Successfully created port: 73faf8f2-a3d7-42da-85b0-667f0caef6f0 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 591.279108] env[62619]: DEBUG nova.network.neutron [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 591.367512] env[62619]: DEBUG nova.scheduler.client.report [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 591.370797] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 591.409862] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 591.443922] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 591.444238] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 591.444499] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 591.444583] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 591.444721] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 591.444864] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 591.445085] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 591.445246] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 591.445409] env[62619]: DEBUG nova.virt.hardware [None 
req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 591.445569] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 591.445738] env[62619]: DEBUG nova.virt.hardware [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 591.446652] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d6699ad-531b-4553-992a-dd2a77270c47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.455628] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4afb536-3614-440b-93b9-5aed0897b842 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 591.784665] env[62619]: DEBUG oslo_concurrency.lockutils [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] Releasing lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 591.784665] env[62619]: DEBUG nova.compute.manager [req-852c06c0-c83f-4e0b-a96c-88b4b45ea93c req-6b934fc5-8b03-4358-ba05-9f5a7f03b1ff service nova] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Received event network-vif-deleted-9b83d46d-092a-484d-876f-43532f8afecf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 591.874968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.496s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 591.875550] env[62619]: DEBUG nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 591.881045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.925s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 591.905031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 592.383171] env[62619]: DEBUG nova.compute.utils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 592.388078] env[62619]: DEBUG nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 592.702540] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68a320d6-4caa-4595-91f7-cd3762fc4b07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.710979] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f7911b2-82e7-48f4-9458-c0aa523a0242 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.745161] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a56bba1b-4faa-4afa-9d8e-890f7e83ab6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.750294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf6372e-4650-4bb1-81e5-5fddc4d8def6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 592.763901] env[62619]: DEBUG nova.compute.provider_tree [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 592.890497] env[62619]: DEBUG nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 593.163280] env[62619]: ERROR nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. [ 593.163280] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.163280] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.163280] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.163280] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.163280] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.163280] env[62619]: ERROR nova.compute.manager raise self.value [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.163280] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 593.163280] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.163280] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 593.164152] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.164152] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 593.164152] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. 
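The traceback above ends in nova.exception.PortBindingFailed raised from _ensure_no_port_binding_failure, after passing through oslo_utils' save_and_reraise_exception (the excutils.py __exit__ / force_reraise frames). A simplified, self-contained sketch of those two patterns, assuming the Neutron port dict carries the binding:vif_type field, with a stand-in exception class rather than Nova's and a hypothetical cleanup body:

from oslo_utils import excutils


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding through the binding:vif_type field.
    if port.get("binding:vif_type") == "binding_failed":
        raise PortBindingFailed(port_id=port["id"])


def update_port(port):
    try:
        ensure_no_port_binding_failure(port)
        return port
    except Exception:
        # __exit__ of this context manager calls force_reraise(), which
        # re-raises the saved exception; that is why both excutils frames
        # appear in the traceback above.
        with excutils.save_and_reraise_exception():
            print(f"update of port {port['id']} failed, cleaning up")


try:
    update_port({"id": "73faf8f2-a3d7-42da-85b0-667f0caef6f0",
                 "binding:vif_type": "binding_failed"})
except PortBindingFailed as exc:
    print(exc)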
[ 593.164152] env[62619]: ERROR nova.compute.manager [ 593.164152] env[62619]: Traceback (most recent call last): [ 593.164152] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 593.164152] env[62619]: listener.cb(fileno) [ 593.164152] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.164152] env[62619]: result = function(*args, **kwargs) [ 593.164152] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.164152] env[62619]: return func(*args, **kwargs) [ 593.164152] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.164152] env[62619]: raise e [ 593.164152] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.164152] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 593.164152] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.164152] env[62619]: created_port_ids = self._update_ports_for_instance( [ 593.164152] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.164152] env[62619]: with excutils.save_and_reraise_exception(): [ 593.164152] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.164152] env[62619]: self.force_reraise() [ 593.164152] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.164152] env[62619]: raise self.value [ 593.164152] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.164152] env[62619]: updated_port = self._update_port( [ 593.164152] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.164152] env[62619]: _ensure_no_port_binding_failure(port) [ 593.164152] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.164152] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 593.166288] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. [ 593.166288] env[62619]: Removing descriptor: 18 [ 593.166288] env[62619]: ERROR nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. 
[ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Traceback (most recent call last): [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] yield resources [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self.driver.spawn(context, instance, image_meta, [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.166288] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] vm_ref = self.build_virtual_machine(instance, [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] for vif in network_info: [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return self._sync_wrapper(fn, *args, **kwargs) [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self.wait() [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self[:] = self._gt.wait() [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return self._exit_event.wait() [ 593.167125] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.168349] env[62619]: ERROR 
nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] result = hub.switch() [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return self.greenlet.switch() [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] result = function(*args, **kwargs) [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return func(*args, **kwargs) [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] raise e [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] nwinfo = self.network_api.allocate_for_instance( [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.168349] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] created_port_ids = self._update_ports_for_instance( [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] with excutils.save_and_reraise_exception(): [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self.force_reraise() [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] raise self.value [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] updated_port = self._update_port( [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.170323] 
env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] _ensure_no_port_binding_failure(port) [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 593.170323] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] raise exception.PortBindingFailed(port_id=port['id']) [ 593.170818] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. [ 593.170818] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] [ 593.170818] env[62619]: INFO nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Terminating instance [ 593.170818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquiring lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.170818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquired lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.170818] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.267135] env[62619]: DEBUG nova.scheduler.client.report [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 593.500323] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "500c7408-7c73-4111-8d96-9090416e73f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.501063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 
tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "500c7408-7c73-4111-8d96-9090416e73f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.635194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "aa89e902-7394-49d5-b6aa-8e9d11548cc5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 593.635194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "aa89e902-7394-49d5-b6aa-8e9d11548cc5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.699832] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 593.773382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.892s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 593.774033] env[62619]: ERROR nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. 
[ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Traceback (most recent call last): [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self.driver.spawn(context, instance, image_meta, [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] vm_ref = self.build_virtual_machine(instance, [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] vif_infos = vmwarevif.get_vif_info(self._session, [ 593.774033] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] for vif in network_info: [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return self._sync_wrapper(fn, *args, **kwargs) [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self.wait() [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self[:] = self._gt.wait() [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return self._exit_event.wait() [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] result = hub.switch() [ 593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
593.774485] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return self.greenlet.switch() [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] result = function(*args, **kwargs) [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] return func(*args, **kwargs) [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] raise e [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] nwinfo = self.network_api.allocate_for_instance( [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] created_port_ids = self._update_ports_for_instance( [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] with excutils.save_and_reraise_exception(): [ 593.774922] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] self.force_reraise() [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] raise self.value [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] updated_port = self._update_port( [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] _ensure_no_port_binding_failure(port) [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] raise exception.PortBindingFailed(port_id=port['id']) [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] nova.exception.PortBindingFailed: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. [ 593.775307] env[62619]: ERROR nova.compute.manager [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] [ 593.775647] env[62619]: DEBUG nova.compute.utils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 593.776629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 21.639s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 593.780623] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Build of instance 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9 was re-scheduled: Binding failed for port 5ef6b61a-bbb5-4339-bc27-c098e4e9366f, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 593.781083] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 593.781400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquiring lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.781447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Acquired lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 593.781988] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 593.816170] env[62619]: DEBUG nova.compute.manager [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Received event network-changed-73faf8f2-a3d7-42da-85b0-667f0caef6f0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 593.816170] env[62619]: DEBUG nova.compute.manager [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Refreshing instance network info cache due to event network-changed-73faf8f2-a3d7-42da-85b0-667f0caef6f0. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 593.816170] env[62619]: DEBUG oslo_concurrency.lockutils [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] Acquiring lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 593.831677] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 593.911019] env[62619]: DEBUG nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 593.941696] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 593.941696] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 593.941696] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 593.941941] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 593.941941] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 593.942056] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 593.942795] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 593.942975] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 593.943159] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 
tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 593.943323] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 593.943490] env[62619]: DEBUG nova.virt.hardware [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 593.944388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1e524fe-6dcf-43f0-96ee-77ad0ddcd8b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.954394] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb67a945-0e07-46c2-a883-990dd0e93762 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.970687] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 593.980675] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.981790] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a641a8d9-1e39-4066-bcec-d9b9215cac1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 593.997018] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Created folder: OpenStack in parent group-v4. [ 593.997018] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating folder: Project (f972fe2d0cbf447e80cf3e306f595159). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 593.997018] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-2023f4fd-2288-49be-89b1-2489ead5db43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.007030] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Created folder: Project (f972fe2d0cbf447e80cf3e306f595159) in parent group-v290436. 
[ 594.007030] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating folder: Instances. Parent ref: group-v290437. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 594.007030] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-612e1d8b-b832-4778-b191-391303643fef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.016631] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Created folder: Instances in parent group-v290437. [ 594.016842] env[62619]: DEBUG oslo.service.loopingcall [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.017041] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 594.017458] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9e65406e-f3d9-4ec1-abe9-6ac6cabbce16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.033173] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 594.033173] env[62619]: value = "task-1364779" [ 594.033173] env[62619]: _type = "Task" [ 594.033173] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.040975] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364779, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.321838] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.333905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Releasing lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.334439] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 594.334644] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 594.335165] env[62619]: DEBUG oslo_concurrency.lockutils [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] Acquired lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.335430] env[62619]: DEBUG nova.network.neutron [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Refreshing network info cache for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 594.337532] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c11fb626-7486-4011-9df1-256bdf44083d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.350355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-682b0afd-2935-4b72-a866-1dbb9ef0ae61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.382801] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 89638fb3-c5f4-48a4-948b-fb6220ed1dca could not be found. [ 594.383882] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 594.384092] env[62619]: INFO nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Took 0.05 seconds to destroy the instance on the hypervisor. [ 594.384228] env[62619]: DEBUG oslo.service.loopingcall [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 594.389481] env[62619]: DEBUG nova.compute.manager [-] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.389619] env[62619]: DEBUG nova.network.neutron [-] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 594.419630] env[62619]: DEBUG nova.network.neutron [-] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.460704] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.545085] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364779, 'name': CreateVM_Task, 'duration_secs': 0.394462} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 594.547673] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 594.550287] env[62619]: DEBUG oslo_vmware.service [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-092989d6-7ad0-4f7d-9a00-f3f59b1c5113 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.554984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 594.555163] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 594.556138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 594.556138] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-298745ee-c225-4540-ac08-652bceba01c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.567584] 
env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 594.567584] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522c7e57-f864-f9ed-0037-4c0bc10ad34e" [ 594.567584] env[62619]: _type = "Task" [ 594.567584] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 594.571092] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522c7e57-f864-f9ed-0037-4c0bc10ad34e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 594.724956] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1585bb4-76fd-4ef0-9bfb-0afde8333177 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.733522] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faa7e32b-a75c-4377-82a7-4a9176055b53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.770483] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521db648-3170-4bf7-ab56-5b4f56fa3b54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.778600] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d60bc99-9b5e-4dd2-844d-e53753889e19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 594.792381] env[62619]: DEBUG nova.compute.provider_tree [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 594.858819] env[62619]: DEBUG nova.network.neutron [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 594.923155] env[62619]: DEBUG nova.network.neutron [-] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.935718] env[62619]: DEBUG nova.network.neutron [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 594.963378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Releasing lock "refresh_cache-98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 594.963615] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 594.963823] env[62619]: DEBUG nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 594.965235] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 594.989798] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 595.076022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.077627] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 595.078168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.078214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.078639] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.078910] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e9a82e41-3be2-4f2e-81ba-6954001780e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.098078] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.098293] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 595.099092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4f94569-3107-4887-816b-17cb2e992382 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.106514] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-571eb6d8-0165-42a9-8644-554e89fa303a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.121022] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 595.121022] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a58ec1-9265-ec48-49f4-0fcfc5353b32" [ 595.121022] env[62619]: _type = "Task" [ 595.121022] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 595.127303] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a58ec1-9265-ec48-49f4-0fcfc5353b32, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 595.241332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "06b595af-8ca9-444a-974c-135bf87a2ec5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.241332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "06b595af-8ca9-444a-974c-135bf87a2ec5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.299027] env[62619]: DEBUG nova.scheduler.client.report [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 595.426361] env[62619]: INFO nova.compute.manager [-] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Took 1.04 seconds to deallocate network for instance. 
[ 595.432324] env[62619]: DEBUG nova.compute.claims [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 595.432324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 595.437848] env[62619]: DEBUG oslo_concurrency.lockutils [req-a9ea0064-4d40-44e1-898d-84f947d7cccd req-397361f8-9d42-433e-8e77-430e32121a77 service nova] Releasing lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 595.492602] env[62619]: DEBUG nova.network.neutron [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 595.632875] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 595.633299] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating directory with path [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 595.633674] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e11eabff-e03e-43ed-aa17-04c122a58390 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.658915] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Created directory with path [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 595.659160] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Fetch image to [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 595.659334] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 
tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Downloading image file data a3c7be48-0721-419b-bbd6-8b4cc36c5604 to [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk on the data store datastore2 {{(pid=62619) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 595.660109] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae68690-e381-4fee-9404-f79bc53195ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.673793] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913d4d29-800e-4fc1-b221-b1adf1e76e62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.686839] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769b3325-49ec-4ab0-8bfc-5efdd7b08a17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.719331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa9e6e5d-3a4b-4090-80c8-149981a16f63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.727718] env[62619]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-63661e6b-2f6a-4810-8b84-6aa8eb0ccb91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 595.802844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.027s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 595.803717] env[62619]: ERROR nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. 
[ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Traceback (most recent call last): [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self.driver.spawn(context, instance, image_meta, [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] vm_ref = self.build_virtual_machine(instance, [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] vif_infos = vmwarevif.get_vif_info(self._session, [ 595.803717] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] for vif in network_info: [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return self._sync_wrapper(fn, *args, **kwargs) [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self.wait() [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self[:] = self._gt.wait() [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return self._exit_event.wait() [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] result = hub.switch() [ 595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
595.804200] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return self.greenlet.switch() [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] result = function(*args, **kwargs) [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] return func(*args, **kwargs) [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] raise e [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] nwinfo = self.network_api.allocate_for_instance( [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] created_port_ids = self._update_ports_for_instance( [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] with excutils.save_and_reraise_exception(): [ 595.804667] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] self.force_reraise() [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] raise self.value [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] updated_port = self._update_port( [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] _ensure_no_port_binding_failure(port) [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] raise exception.PortBindingFailed(port_id=port['id']) [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] nova.exception.PortBindingFailed: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. [ 595.805138] env[62619]: ERROR nova.compute.manager [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] [ 595.805546] env[62619]: DEBUG nova.compute.utils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 595.806170] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Build of instance 72997052-6221-4a7b-abb7-07a7ce87bf3c was re-scheduled: Binding failed for port 66364b7e-51af-4008-8912-08003a4c1ca4, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 595.806599] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 595.806826] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquiring lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 595.806973] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Acquired lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 595.807142] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 595.809194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.508s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 595.810850] env[62619]: INFO nova.compute.claims 
[None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 595.825973] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Downloading image file data a3c7be48-0721-419b-bbd6-8b4cc36c5604 to the data store datastore2 {{(pid=62619) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 595.905665] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. {{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 595.966720] env[62619]: DEBUG nova.compute.manager [req-83516c7a-78ff-413b-8a8f-87e37da556a9 req-d2e7d80a-9405-4683-ae7c-44e7f4229507 service nova] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Received event network-vif-deleted-73faf8f2-a3d7-42da-85b0-667f0caef6f0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 595.997358] env[62619]: INFO nova.compute.manager [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] [instance: 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9] Took 1.03 seconds to deallocate network for instance. [ 596.351904] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 596.540048] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 596.540358] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore2. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 596.545252] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 596.700576] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Downloaded image file data a3c7be48-0721-419b-bbd6-8b4cc36c5604 to vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk on the data store datastore2 {{(pid=62619) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 596.702480] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 596.702746] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Copying Virtual Disk [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk to [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 596.703063] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-92909099-c709-4c12-9a36-c29945b2fdf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 596.714028] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 596.714028] env[62619]: value = "task-1364780" [ 596.714028] env[62619]: _type = "Task" [ 596.714028] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 596.722591] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364780, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.044868] env[62619]: INFO nova.scheduler.client.report [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Deleted allocations for instance 98b43e0d-87cd-4be8-87b3-3b94ebc97cf9 [ 597.054016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Releasing lock "refresh_cache-72997052-6221-4a7b-abb7-07a7ce87bf3c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 597.054016] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 597.054016] env[62619]: DEBUG nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 597.054016] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 597.107871] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 597.228038] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364780, 'name': CopyVirtualDisk_Task} progress is 35%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.282907] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4099f552-1339-456c-81af-ea83a78700b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.293830] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f0156ac-ee75-444a-8ab1-52206046ce2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.327941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5d4d5e3-bc07-4d92-ba00-e9e7115b4bde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.336328] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3fdf1be-06df-4741-bcb6-5961659751c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.354198] env[62619]: DEBUG nova.compute.provider_tree [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 597.561752] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8b7984da-b0cc-46bc-8f56-7b8794151b80 tempest-ServersV294TestFqdnHostnames-1682625067 tempest-ServersV294TestFqdnHostnames-1682625067-project-member] Lock "98b43e0d-87cd-4be8-87b3-3b94ebc97cf9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.865s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 597.609941] env[62619]: DEBUG nova.network.neutron [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 597.730563] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364780, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.930328} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 597.730563] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Copied Virtual Disk [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk to [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 597.730912] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleting the datastore file [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 597.730912] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-426e0176-924e-4f90-a72f-ba019cfd3958 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 597.737617] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 597.737617] env[62619]: value = "task-1364781" [ 597.737617] env[62619]: _type = "Task" [ 597.737617] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 597.749401] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364781, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 597.860909] env[62619]: DEBUG nova.scheduler.client.report [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 598.069564] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 598.114025] env[62619]: INFO nova.compute.manager [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] [instance: 72997052-6221-4a7b-abb7-07a7ce87bf3c] Took 1.06 seconds to deallocate network for instance. [ 598.249953] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364781, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025988} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.250501] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 598.250892] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Moving file from [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895/a3c7be48-0721-419b-bbd6-8b4cc36c5604 to [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604. {{(pid=62619) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 598.251591] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-c046cb54-4067-476d-924f-d477054585df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.261107] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 598.261107] env[62619]: value = "task-1364782" [ 598.261107] env[62619]: _type = "Task" [ 598.261107] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.274652] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364782, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.367942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.559s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 598.368710] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 598.372353] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.146s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 598.373861] env[62619]: INFO nova.compute.claims [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 598.598369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 598.771379] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364782, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.023693} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 598.772055] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] File moved {{(pid=62619) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 598.772666] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Cleaning up location [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 598.772962] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleting the datastore file [datastore2] vmware_temp/6889f85d-b59c-459b-add8-5ef276db1895 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 598.773344] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccbfaa76-9eeb-4ede-a03b-8b123d1045ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 598.784032] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 598.784032] env[62619]: value = "task-1364783" [ 598.784032] env[62619]: _type = "Task" [ 598.784032] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 598.793441] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364783, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 598.882558] env[62619]: DEBUG nova.compute.utils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 598.888991] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 598.888991] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 598.953682] env[62619]: DEBUG nova.policy [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c868bf93d8d641f79d56848b143a50b7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6645bb6c9f284dc48dde5e383f4e3f5e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 599.161111] env[62619]: INFO nova.scheduler.client.report [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Deleted allocations for instance 72997052-6221-4a7b-abb7-07a7ce87bf3c [ 599.298524] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364783, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.041461} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.298524] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 599.299942] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc5e2ebc-db36-4ce7-9cef-8ff934c62973 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.309860] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 599.309860] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ebdcb4-ebb6-1ce0-d6e4-52fbc4648367" [ 599.309860] env[62619]: _type = "Task" [ 599.309860] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.328343] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ebdcb4-ebb6-1ce0-d6e4-52fbc4648367, 'name': SearchDatastore_Task, 'duration_secs': 0.009955} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 599.328343] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 599.328343] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 599.328343] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b9a52b30-9855-4a6f-9cea-d9ad2214c9b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.336064] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 599.336064] env[62619]: value = "task-1364784" [ 599.336064] env[62619]: _type = "Task" [ 599.336064] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 599.343955] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.389886] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 599.603969] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Successfully created port: 96cc2528-5a4f-4884-97dc-080a74195033 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 599.672181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3c50270e-95ea-42ae-a494-10f092b7057d tempest-VolumesAssistedSnapshotsTest-711297406 tempest-VolumesAssistedSnapshotsTest-711297406-project-member] Lock "72997052-6221-4a7b-abb7-07a7ce87bf3c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.635s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 599.814142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b629ca99-5d9d-4c32-9e8f-3624a9d63d82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.829234] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfb94b4-67e1-41b8-9d54-709b5dfc1549 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.867086] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffb36d54-bf64-4913-849a-eaa6aba0cd3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.874436] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364784, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 599.879395] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49708b79-27c5-4453-8d69-9bb4701ceb0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 599.899029] env[62619]: DEBUG nova.compute.provider_tree [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 600.179168] env[62619]: DEBUG nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 600.374299] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364784, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540648} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.374675] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 600.374972] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 600.375294] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5c8409b9-635a-4cd0-b743-eea0866cccad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.381964] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 600.381964] env[62619]: value = "task-1364785" [ 600.381964] env[62619]: _type = "Task" [ 600.381964] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.390360] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364785, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 600.404925] env[62619]: DEBUG nova.scheduler.client.report [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 600.414237] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 600.457477] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 600.457937] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 600.458050] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 600.458407] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 600.458632] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Image pref 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 600.458820] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 600.459078] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 600.459400] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 600.459400] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 600.459602] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 600.459973] env[62619]: DEBUG nova.virt.hardware [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 600.461376] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40422193-c458-41b9-9a25-3a6cf7c9c930 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.469570] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36cc0a35-b26c-46fc-be2b-4235932ebfd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.548960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquiring lock "709ed215-d501-409a-ab80-6c4b844d24e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.549209] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Lock 
"709ed215-d501-409a-ab80-6c4b844d24e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.703999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 600.892965] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364785, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065262} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 600.893296] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 600.894054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e946ddf-dc01-4a4f-8129-b1e7ed84b4dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.923251] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 600.924122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.552s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 600.927844] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 600.930703] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1867231b-0b25-4f10-963f-8b2bfe6aa8d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 600.948654] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.130s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 600.961026] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 600.961026] env[62619]: value = "task-1364786" [ 600.961026] env[62619]: _type = "Task" [ 600.961026] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 600.970141] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364786, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.450013] env[62619]: DEBUG nova.compute.utils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 601.452209] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 601.453519] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 601.467686] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364786, 'name': ReconfigVM_Task, 'duration_secs': 0.281865} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.467929] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 601.471318] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-dd43758b-2e7f-4871-82d5-44a05e64744a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.481245] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 601.481245] env[62619]: value = "task-1364787" [ 601.481245] env[62619]: _type = "Task" [ 601.481245] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 601.491561] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364787, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 601.519947] env[62619]: DEBUG nova.compute.manager [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Received event network-changed-96cc2528-5a4f-4884-97dc-080a74195033 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 601.519947] env[62619]: DEBUG nova.compute.manager [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Refreshing instance network info cache due to event network-changed-96cc2528-5a4f-4884-97dc-080a74195033. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 601.521040] env[62619]: DEBUG oslo_concurrency.lockutils [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] Acquiring lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 601.521313] env[62619]: DEBUG oslo_concurrency.lockutils [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] Acquired lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 601.521850] env[62619]: DEBUG nova.network.neutron [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Refreshing network info cache for port 96cc2528-5a4f-4884-97dc-080a74195033 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 601.543789] env[62619]: DEBUG nova.policy [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ce526a1d824fe8b6573fa80adcd53f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33242a5e0a764cf3b8af687fc4302e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 601.879500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c616b6-5ed2-4ad4-b329-04b011b7dc6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.889420] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d832177e-5f3d-44b4-84ca-41499b84a13e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.923617] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bad7b1ce-bd54-4179-979a-9c82bcacf9b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.932512] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0611855d-af67-4845-9c9e-226e8bf72957 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 601.948546] env[62619]: DEBUG nova.compute.provider_tree [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 601.954147] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 
5680b562-8e8d-4fed-8b48-ec7add23ed7b] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 601.994052] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364787, 'name': Rename_Task, 'duration_secs': 0.144271} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 601.994839] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 601.994894] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0719e15d-e8ca-4e19-a4a4-7c850966a5c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.002774] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 602.002774] env[62619]: value = "task-1364788" [ 602.002774] env[62619]: _type = "Task" [ 602.002774] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 602.012491] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364788, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 602.121921] env[62619]: DEBUG nova.network.neutron [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 602.241403] env[62619]: ERROR nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. 
[ 602.241403] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.241403] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.241403] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.241403] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.241403] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.241403] env[62619]: ERROR nova.compute.manager raise self.value [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.241403] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 602.241403] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.241403] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 602.242386] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.242386] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 602.242386] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. 
[ 602.242386] env[62619]: ERROR nova.compute.manager [ 602.242386] env[62619]: Traceback (most recent call last): [ 602.242386] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 602.242386] env[62619]: listener.cb(fileno) [ 602.242386] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.242386] env[62619]: result = function(*args, **kwargs) [ 602.242386] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.242386] env[62619]: return func(*args, **kwargs) [ 602.242386] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.242386] env[62619]: raise e [ 602.242386] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.242386] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 602.242386] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.242386] env[62619]: created_port_ids = self._update_ports_for_instance( [ 602.242386] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.242386] env[62619]: with excutils.save_and_reraise_exception(): [ 602.242386] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.242386] env[62619]: self.force_reraise() [ 602.242386] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.242386] env[62619]: raise self.value [ 602.242386] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.242386] env[62619]: updated_port = self._update_port( [ 602.242386] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.242386] env[62619]: _ensure_no_port_binding_failure(port) [ 602.242386] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.242386] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 602.243818] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. [ 602.243818] env[62619]: Removing descriptor: 18 [ 602.243818] env[62619]: ERROR nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. 
[ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Traceback (most recent call last): [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] yield resources [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self.driver.spawn(context, instance, image_meta, [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.243818] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] vm_ref = self.build_virtual_machine(instance, [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] for vif in network_info: [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return self._sync_wrapper(fn, *args, **kwargs) [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self.wait() [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self[:] = self._gt.wait() [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return self._exit_event.wait() [ 602.244575] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.245684] env[62619]: ERROR 
nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] result = hub.switch() [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return self.greenlet.switch() [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] result = function(*args, **kwargs) [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return func(*args, **kwargs) [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] raise e [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] nwinfo = self.network_api.allocate_for_instance( [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.245684] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] created_port_ids = self._update_ports_for_instance( [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] with excutils.save_and_reraise_exception(): [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self.force_reraise() [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] raise self.value [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] updated_port = self._update_port( [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.247101] 
env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] _ensure_no_port_binding_failure(port) [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 602.247101] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] raise exception.PortBindingFailed(port_id=port['id']) [ 602.247456] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. [ 602.247456] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] [ 602.247456] env[62619]: INFO nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Terminating instance [ 602.247607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquiring lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.456842] env[62619]: DEBUG nova.scheduler.client.report [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 602.516739] env[62619]: DEBUG oslo_vmware.api [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364788, 'name': PowerOnVM_Task, 'duration_secs': 0.442862} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 602.517097] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 602.517691] env[62619]: INFO nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Took 8.61 seconds to spawn the instance on the hypervisor. 
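The traceback above (nova/network/neutron.py:585 -> :294) marks the spot where Nova turns a Neutron port whose binding failed into the PortBindingFailed that aborts the spawn of instance d22ac031-3202-457c-8bb7-d557ad0fe9f9. A minimal sketch of that check follows, assuming the failure is signalled through the port's 'binding:vif_type' attribute; only the exception class, the raising call, and the message text are confirmed by the log itself, the field name and constant are assumptions.

    # Hedged sketch of the check at nova/network/neutron.py:294 in the traceback.
    # Assumption: a failed binding is reported via the port's 'binding:vif_type'
    # field; the exception class and message wording are taken from the log.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, "
                "please check neutron logs for more information."
            )

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # assumed sentinel value

    def _ensure_no_port_binding_failure(port):
        # Raise the same exception class the log records at neutron.py:294.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    # The port from the log, as Neutron might hand it back after a failed binding:
    port = {'id': '96cc2528-5a4f-4884-97dc-080a74195033',
            'binding:vif_type': 'binding_failed'}
    _ensure_no_port_binding_failure(port)  # raises PortBindingFailed as in the log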
[ 602.517691] env[62619]: DEBUG nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 602.518535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015519f2-78d2-4295-bbc4-277acd0b728b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 602.524626] env[62619]: DEBUG nova.network.neutron [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 602.589079] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Successfully created port: c495a2a5-cb70-40fe-b790-8849bc1d1fe5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 602.970377] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.021s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 602.971487] env[62619]: ERROR nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. 
[ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Traceback (most recent call last): [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self.driver.spawn(context, instance, image_meta, [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self._vmops.spawn(context, instance, image_meta, injected_files, [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] vm_ref = self.build_virtual_machine(instance, [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] vif_infos = vmwarevif.get_vif_info(self._session, [ 602.971487] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] for vif in network_info: [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return self._sync_wrapper(fn, *args, **kwargs) [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self.wait() [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self[:] = self._gt.wait() [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return self._exit_event.wait() [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] result = hub.switch() [ 602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
602.972055] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return self.greenlet.switch() [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] result = function(*args, **kwargs) [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] return func(*args, **kwargs) [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] raise e [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] nwinfo = self.network_api.allocate_for_instance( [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] created_port_ids = self._update_ports_for_instance( [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] with excutils.save_and_reraise_exception(): [ 602.972440] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] self.force_reraise() [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] raise self.value [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] updated_port = self._update_port( [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] _ensure_no_port_binding_failure(port) [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] raise exception.PortBindingFailed(port_id=port['id']) [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] nova.exception.PortBindingFailed: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. [ 602.974312] env[62619]: ERROR nova.compute.manager [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] [ 602.974681] env[62619]: DEBUG nova.compute.utils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 602.975543] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 602.978107] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Build of instance 748a4c26-6df7-4a4c-b81a-0b3e59a8b936 was re-scheduled: Binding failed for port 7964180c-d61d-4aa2-a968-9e61eec0a3c5, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 602.979979] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 602.979979] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquiring lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 602.979979] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Acquired lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 602.979979] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 602.980850] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.342s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 602.984028] env[62619]: INFO nova.compute.claims [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 603.012735] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 603.012987] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 
tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 603.013245] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 603.014141] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 603.014141] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 603.014141] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 603.014141] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 603.014141] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 603.014453] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 603.014453] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 603.015600] env[62619]: DEBUG nova.virt.hardware [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 603.015600] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477a4bb8-cf54-4b1b-977e-880451278347 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.025367] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88832292-e552-4ca5-a468-dd9a1ef73380 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 603.032363] env[62619]: DEBUG oslo_concurrency.lockutils [req-e13089e9-362b-49d1-9cfd-7e1f51b66fec req-11707539-245b-4c68-b01c-ccef19de47bf service nova] Releasing lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 603.042162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquired lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 603.042162] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 603.055380] env[62619]: INFO nova.compute.manager [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Took 36.37 seconds to build instance. [ 603.522347] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.560232] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9198e66e-de94-4ecd-a9b7-232bd2004487 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "79a94ed1-1139-4194-8091-00b7b1562330" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 47.816s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 603.575717] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 603.643458] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.726860] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 603.786395] env[62619]: DEBUG nova.compute.manager [req-d47f2474-a92d-4021-86fc-de28beb45862 req-337b2427-62f8-4f45-875a-09cfa232eda4 service nova] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Received event network-vif-deleted-96cc2528-5a4f-4884-97dc-080a74195033 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 603.963723] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquiring lock "7d8ae6c2-1453-4d61-a2b5-311a557087de" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 603.964163] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Lock "7d8ae6c2-1453-4d61-a2b5-311a557087de" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 604.066297] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 604.146273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Releasing lock "refresh_cache-748a4c26-6df7-4a4c-b81a-0b3e59a8b936" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.146516] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 604.146676] env[62619]: DEBUG nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 604.146862] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 604.180172] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.230852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Releasing lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 604.231320] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 604.231504] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 604.232131] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3b2a1209-9676-48de-a33f-5fbf8fc7666a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.243694] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e8d44e8-54f2-45da-ac83-8c9647d40eb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.269939] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d22ac031-3202-457c-8bb7-d557ad0fe9f9 could not be found. 
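The Acquiring/acquired/released lines around the "compute_resources" claims and the "refresh_cache-<uuid>" cache refreshes come from oslo_concurrency's lock helpers, which also record how long each caller waited for the lock and how long it was held. A minimal sketch of that pattern (illustrative only, not the actual Nova call sites):

    from oslo_concurrency import lockutils

    # Decorator form: the function body runs under the named lock, and
    # lockutils logs the waited/held durations seen in the entries above.
    @lockutils.synchronized('compute_resources')
    def claim_resources():
        pass

    # Context-manager form, as used for the per-instance network cache locks.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass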
[ 604.270193] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 604.270367] env[62619]: INFO nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 604.271487] env[62619]: DEBUG oslo.service.loopingcall [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 604.277605] env[62619]: DEBUG nova.compute.manager [-] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 604.277605] env[62619]: DEBUG nova.network.neutron [-] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 604.326881] env[62619]: DEBUG nova.network.neutron [-] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 604.383033] env[62619]: DEBUG nova.compute.manager [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Received event network-changed-c495a2a5-cb70-40fe-b790-8849bc1d1fe5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 604.383298] env[62619]: DEBUG nova.compute.manager [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Refreshing instance network info cache due to event network-changed-c495a2a5-cb70-40fe-b790-8849bc1d1fe5. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 604.383509] env[62619]: DEBUG oslo_concurrency.lockutils [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] Acquiring lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.383783] env[62619]: DEBUG oslo_concurrency.lockutils [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] Acquired lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 604.383950] env[62619]: DEBUG nova.network.neutron [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Refreshing network info cache for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 604.441675] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a602cd68-6a24-47e4-ac3a-1aee81aa9d1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.450436] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5c29fc3e-ce63-43de-a7d1-b3e71ab84bdb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.482934] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7605144-4f15-4e52-a1b5-79aa113c9a04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.490751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1eded40-6ab0-4f91-9848-9e270dfbd937 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.508132] env[62619]: DEBUG nova.compute.provider_tree [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 604.540134] env[62619]: ERROR nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. 
[ 604.540134] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.540134] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.540134] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.540134] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.540134] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.540134] env[62619]: ERROR nova.compute.manager raise self.value [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.540134] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 604.540134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.540134] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 604.541981] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.541981] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 604.541981] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. 
[ 604.541981] env[62619]: ERROR nova.compute.manager [ 604.541981] env[62619]: Traceback (most recent call last): [ 604.541981] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 604.541981] env[62619]: listener.cb(fileno) [ 604.541981] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.541981] env[62619]: result = function(*args, **kwargs) [ 604.541981] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.541981] env[62619]: return func(*args, **kwargs) [ 604.541981] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.541981] env[62619]: raise e [ 604.541981] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.541981] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 604.541981] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.541981] env[62619]: created_port_ids = self._update_ports_for_instance( [ 604.541981] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.541981] env[62619]: with excutils.save_and_reraise_exception(): [ 604.541981] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.541981] env[62619]: self.force_reraise() [ 604.541981] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.541981] env[62619]: raise self.value [ 604.541981] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.541981] env[62619]: updated_port = self._update_port( [ 604.541981] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.541981] env[62619]: _ensure_no_port_binding_failure(port) [ 604.541981] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.541981] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 604.543782] env[62619]: nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. [ 604.543782] env[62619]: Removing descriptor: 17 [ 604.543782] env[62619]: ERROR nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. 
[ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Traceback (most recent call last): [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] yield resources [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self.driver.spawn(context, instance, image_meta, [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 604.543782] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] vm_ref = self.build_virtual_machine(instance, [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] vif_infos = vmwarevif.get_vif_info(self._session, [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] for vif in network_info: [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return self._sync_wrapper(fn, *args, **kwargs) [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self.wait() [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self[:] = self._gt.wait() [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return self._exit_event.wait() [ 604.544285] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 604.544827] env[62619]: ERROR 
nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] result = hub.switch() [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return self.greenlet.switch() [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] result = function(*args, **kwargs) [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return func(*args, **kwargs) [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] raise e [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] nwinfo = self.network_api.allocate_for_instance( [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 604.544827] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] created_port_ids = self._update_ports_for_instance( [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] with excutils.save_and_reraise_exception(): [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self.force_reraise() [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] raise self.value [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] updated_port = self._update_port( [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 604.545359] 
env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] _ensure_no_port_binding_failure(port) [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 604.545359] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] raise exception.PortBindingFailed(port_id=port['id']) [ 604.546854] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. [ 604.546854] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] [ 604.546854] env[62619]: INFO nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Terminating instance [ 604.546854] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 604.595180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 604.683369] env[62619]: DEBUG nova.network.neutron [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.782829] env[62619]: INFO nova.compute.manager [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Rebuilding instance [ 604.834568] env[62619]: DEBUG nova.network.neutron [-] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 604.839134] env[62619]: DEBUG nova.compute.manager [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 604.839134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18449db4-2572-49c3-a925-f479af887512 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 604.928637] env[62619]: DEBUG nova.network.neutron [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 
service nova] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 605.010884] env[62619]: DEBUG nova.scheduler.client.report [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 605.115185] env[62619]: DEBUG nova.network.neutron [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 605.189176] env[62619]: INFO nova.compute.manager [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] [instance: 748a4c26-6df7-4a4c-b81a-0b3e59a8b936] Took 1.04 seconds to deallocate network for instance. [ 605.342234] env[62619]: INFO nova.compute.manager [-] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Took 1.07 seconds to deallocate network for instance. [ 605.344949] env[62619]: DEBUG nova.compute.claims [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 605.344949] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 605.358021] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 605.358021] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-696a249b-066f-4fe9-80c4-2120ebd256b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.367353] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 605.367353] env[62619]: value = "task-1364793" [ 605.367353] env[62619]: _type = "Task" [ 605.367353] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.376749] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364793, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 605.518071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.537s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 605.518601] env[62619]: DEBUG nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 605.521635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.678s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 605.618217] env[62619]: DEBUG oslo_concurrency.lockutils [req-8276c430-3101-4171-944c-7a3ee96392d3 req-17b504ca-d100-49a0-97ea-a2349cd3d521 service nova] Releasing lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 605.618571] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 605.618746] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 605.883297] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364793, 'name': PowerOffVM_Task, 'duration_secs': 0.126448} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 605.883607] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 605.883825] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 605.887480] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095275ac-91e4-4cc6-afd2-b3b01cb2cd0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.898406] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 605.898527] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-cd2fbbaa-7bc7-439e-b1ca-7c2ec05fd070 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.930998] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 605.931084] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 605.931932] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleting the datastore file [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 605.931932] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-498512e7-e56c-4fcf-9748-1e531bd193a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.939172] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 605.939172] env[62619]: value = "task-1364796" [ 605.939172] env[62619]: _type = "Task" [ 605.939172] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 605.949306] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364796, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 606.034784] env[62619]: DEBUG nova.compute.utils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 606.037552] env[62619]: DEBUG nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 606.156771] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 606.220604] env[62619]: INFO nova.scheduler.client.report [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Deleted allocations for instance 748a4c26-6df7-4a4c-b81a-0b3e59a8b936 [ 606.302125] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 606.441441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4380bb9-f682-4c5d-8a7b-4df9042dcf04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.454790] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44ec7ed0-adac-4e86-a9fe-c5dffd141a1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.457827] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364796, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144879} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 606.458394] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 606.458578] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 606.458752] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.493429] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9aa5da1f-1989-458d-9202-07449523f6c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.504280] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d30c88e6-eb01-4dd6-a198-37e3f8e6502b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.520270] env[62619]: DEBUG nova.compute.provider_tree [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 606.539130] env[62619]: DEBUG nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 606.647476] env[62619]: DEBUG nova.compute.manager [req-8c1e4cdb-1766-471c-ba57-6c7f69a6eae8 req-8d748e64-803e-4088-aa08-3321d21e37cd service nova] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Received event network-vif-deleted-c495a2a5-cb70-40fe-b790-8849bc1d1fe5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 606.738197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dfe2687c-e23c-4560-b704-34211144f01b tempest-ServerExternalEventsTest-1962781045 tempest-ServerExternalEventsTest-1962781045-project-member] Lock "748a4c26-6df7-4a4c-b81a-0b3e59a8b936" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.518s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 606.805365] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 606.805839] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 606.805979] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 606.806289] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c2b5a0b3-aebb-463f-808c-c554149b6e3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.816870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87f52246-1e73-465a-afd8-ba279c072d72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 606.838105] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5680b562-8e8d-4fed-8b48-ec7add23ed7b could not be found. 
[ 606.838482] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 606.838565] env[62619]: INFO nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 606.838788] env[62619]: DEBUG oslo.service.loopingcall [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 606.839048] env[62619]: DEBUG nova.compute.manager [-] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 606.839149] env[62619]: DEBUG nova.network.neutron [-] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 606.874508] env[62619]: DEBUG nova.network.neutron [-] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 607.008507] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquiring lock "de395cf9-2888-4a0d-a1b8-5ce4c36d6182" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.008507] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Lock "de395cf9-2888-4a0d-a1b8-5ce4c36d6182" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.024013] env[62619]: DEBUG nova.scheduler.client.report [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 607.241663] env[62619]: DEBUG nova.compute.manager [None 
req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 607.378012] env[62619]: DEBUG nova.network.neutron [-] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 607.504197] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.504197] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.504366] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.504409] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.504549] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.504684] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.505656] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 607.505906] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.507028] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.507028] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.507028] env[62619]: DEBUG nova.virt.hardware [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.507614] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66e14dd-ea67-4b4c-8e1a-07a43cc248af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.517486] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ad2118-6b51-44cb-ba50-7ae2b58a3c1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.534179] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.012s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 607.536254] env[62619]: ERROR nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. 
[ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Traceback (most recent call last): [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self.driver.spawn(context, instance, image_meta, [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] vm_ref = self.build_virtual_machine(instance, [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] vif_infos = vmwarevif.get_vif_info(self._session, [ 607.536254] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] for vif in network_info: [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return self._sync_wrapper(fn, *args, **kwargs) [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self.wait() [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self[:] = self._gt.wait() [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return self._exit_event.wait() [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] result = hub.switch() [ 607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
607.536632] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return self.greenlet.switch() [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] result = function(*args, **kwargs) [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] return func(*args, **kwargs) [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] raise e [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] nwinfo = self.network_api.allocate_for_instance( [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] created_port_ids = self._update_ports_for_instance( [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] with excutils.save_and_reraise_exception(): [ 607.537066] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] self.force_reraise() [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] raise self.value [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] updated_port = self._update_port( [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] _ensure_no_port_binding_failure(port) [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] raise exception.PortBindingFailed(port_id=port['id']) [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] nova.exception.PortBindingFailed: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. [ 607.537493] env[62619]: ERROR nova.compute.manager [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] [ 607.537915] env[62619]: DEBUG nova.compute.utils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 607.540129] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.545689] env[62619]: DEBUG oslo.service.loopingcall [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.546133] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Build of instance 5e419be8-cfb9-4819-8e92-873daa313d7a was re-scheduled: Binding failed for port 9b83d46d-092a-484d-876f-43532f8afecf, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 607.546587] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 607.546818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquiring lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 607.548100] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Acquired lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 607.548100] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 607.548439] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.644s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 607.549972] env[62619]: INFO nova.compute.claims [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 607.552401] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.554449] env[62619]: DEBUG nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 607.555738] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c238bb13-621e-4d6f-8ee7-839ce15e1faa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.575833] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.575833] env[62619]: value = "task-1364797" [ 607.575833] env[62619]: _type = "Task" [ 607.575833] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.586597] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364797, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.595427] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.595427] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.595573] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.595737] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.595875] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.596016] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.596214] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.596360] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.597182] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.597182] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.597182] env[62619]: DEBUG nova.virt.hardware [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.597709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ad78d11-401d-4adc-b3cc-21a2db8fcf86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.606283] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f468bf0-356e-458e-afd3-97853f971dd7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.620510] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 607.625891] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Creating folder: Project (1c2eb536e35148bd9b26f04b988d1fdb). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.626803] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-14fc355f-3b54-4285-819f-b4c3b9cb43b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.636975] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Created folder: Project (1c2eb536e35148bd9b26f04b988d1fdb) in parent group-v290436. [ 607.637262] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Creating folder: Instances. Parent ref: group-v290444. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 607.637501] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f7c4ad85-39e3-4649-9f68-017fca520e85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.646910] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Created folder: Instances in parent group-v290444. [ 607.647160] env[62619]: DEBUG oslo.service.loopingcall [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 607.647348] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 607.647556] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-eb20a20c-bf38-4c37-8409-54880d6193d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.666653] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 607.666653] env[62619]: value = "task-1364800" [ 607.666653] env[62619]: _type = "Task" [ 607.666653] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 607.675046] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364800, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 607.774983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 607.882030] env[62619]: INFO nova.compute.manager [-] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Took 1.04 seconds to deallocate network for instance. 
[ 607.889780] env[62619]: DEBUG nova.compute.claims [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 607.889780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.080209] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.087399] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364797, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.179950] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364800, 'name': CreateVM_Task, 'duration_secs': 0.298896} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 608.180136] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 608.181191] env[62619]: DEBUG oslo_vmware.service [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a04ee42-ff30-4f1b-bafb-dd622a2cd77e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.195222] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 608.196349] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.196714] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.197257] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f 
tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 608.197673] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b4230b9-d260-4265-9618-60c963a2a953 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.203653] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 608.203653] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52696c7c-894f-e6b3-49d6-e09a45117c4b" [ 608.203653] env[62619]: _type = "Task" [ 608.203653] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.214041] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52696c7c-894f-e6b3-49d6-e09a45117c4b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.586663] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364797, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.703443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Releasing lock "refresh_cache-5e419be8-cfb9-4819-8e92-873daa313d7a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.703669] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 608.703846] env[62619]: DEBUG nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 608.704078] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 608.725837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 608.726085] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 608.726322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 608.726483] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 608.726654] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 608.727149] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bc8ee097-b5eb-4608-9437-5b691a1361c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.739470] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 608.747326] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 608.747809] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 608.748865] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cbc600b-785f-4d5f-9ced-1e19c7074021 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.757040] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b469606-0a9f-4608-8a9f-f0e56b9f8acb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 608.765602] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 608.765602] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52aa598c-ba51-ea7a-e094-c27df72c20a9" [ 608.765602] env[62619]: _type = "Task" [ 608.765602] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 608.776454] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52aa598c-ba51-ea7a-e094-c27df72c20a9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 608.870582] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "37bb9933-80fa-4a54-82fe-f864a411425f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.871224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "37bb9933-80fa-4a54-82fe-f864a411425f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 608.893017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "d02f8bcb-c754-4308-9c90-260624010cb0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 608.893017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "d02f8bcb-c754-4308-9c90-260624010cb0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 609.010360] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99746c03-b15a-4053-bc1b-dd8a26b0d16e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.019791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68255e2e-5529-4ebd-98c0-d1304b78eafc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.060421] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850a5847-d52b-4ff2-b162-2ca660d8de39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.069123] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56cfe20c-00bf-493c-aee2-8cfc57a5b36f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.082909] env[62619]: DEBUG nova.compute.provider_tree [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 609.091572] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364797, 'name': 
CreateVM_Task, 'duration_secs': 1.309036} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 609.091718] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 609.092162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.093204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 609.093204] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 609.093204] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d18486cd-a606-4a44-b74f-b4966c659236 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.097403] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 609.097403] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dc91fa-1140-6f4e-064b-5aa130f48b24" [ 609.097403] env[62619]: _type = "Task" [ 609.097403] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 609.106214] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dc91fa-1140-6f4e-064b-5aa130f48b24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 609.242717] env[62619]: DEBUG nova.network.neutron [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 609.277244] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 609.277964] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Creating directory with path [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 609.277964] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d594d0e-80fd-456a-a0ba-ed850018576c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.308596] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Created directory with path [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 609.309397] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Fetch image to [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 609.309397] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Downloading image file data a3c7be48-0721-419b-bbd6-8b4cc36c5604 to [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk on the data store datastore1 {{(pid=62619) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 609.309981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a085e6c-e048-463d-9c52-eb2d4ef8dbfa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.351858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3edb6b-c439-4bdc-9911-e03d7fab77fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.351858] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04352b76-b24f-418a-a7c8-1dd2ac105cbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.373040] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-679a732d-aa1c-4e39-95b8-03fd1d983cf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.380268] env[62619]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c6552e83-9bf9-4001-9d63-a57c0f65c6c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 609.475402] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Downloading image file data a3c7be48-0721-419b-bbd6-8b4cc36c5604 to the data store datastore1 {{(pid=62619) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 609.554448] env[62619]: DEBUG oslo_vmware.rw_handles [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 609.618355] env[62619]: DEBUG nova.scheduler.client.report [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 609.642597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 609.642815] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 609.643040] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 609.744451] env[62619]: INFO nova.compute.manager [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] [instance: 5e419be8-cfb9-4819-8e92-873daa313d7a] Took 1.04 seconds to deallocate network for instance. [ 610.133154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.585s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 610.133711] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 610.136496] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.707s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 610.255358] env[62619]: DEBUG oslo_vmware.rw_handles [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 610.255485] env[62619]: DEBUG oslo_vmware.rw_handles [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 610.397080] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Downloaded image file data a3c7be48-0721-419b-bbd6-8b4cc36c5604 to vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk on the data store datastore1 {{(pid=62619) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 610.399139] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 610.399383] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Copying Virtual Disk [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk to [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 610.399662] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-81938d55-c95a-46d5-8cc3-ffddc9d6cfe0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 610.406952] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 610.406952] env[62619]: value = "task-1364803" [ 610.406952] env[62619]: _type = "Task" [ 610.406952] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 610.414675] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 610.642128] env[62619]: DEBUG nova.compute.utils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 610.643710] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 610.643794] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 610.781673] env[62619]: DEBUG nova.policy [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '38226df72425401b9396a5ce15c8c1f5', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1b74a31666884e19b5a91325d435a6e6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 610.794997] env[62619]: INFO nova.scheduler.client.report [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Deleted allocations for instance 5e419be8-cfb9-4819-8e92-873daa313d7a [ 610.917404] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364803, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.101186] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a6d054a-2bea-43cc-821d-3723e53ee887 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.109421] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-680ac72d-c5c9-48f8-9c73-42fc76957066 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.152442] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 611.157410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92f9f39a-861e-4b0f-b201-8fd542d86917 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.165252] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc5a382b-8a0b-4021-bbf8-c0bd046ab4db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.178642] env[62619]: DEBUG nova.compute.provider_tree [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.224891] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Successfully created port: 539a2943-0cc1-46ab-b932-112b4866f823 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 611.305576] env[62619]: DEBUG oslo_concurrency.lockutils [None req-25c11453-3294-40de-9dcc-c005eb80bb5d tempest-ServersWithSpecificFlavorTestJSON-971329770 tempest-ServersWithSpecificFlavorTestJSON-971329770-project-member] Lock "5e419be8-cfb9-4819-8e92-873daa313d7a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.785s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 611.421135] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364803, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679091} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.421436] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Copied Virtual Disk [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk to [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 611.421627] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleting the datastore file [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604/tmp-sparse.vmdk {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 611.421872] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64a887f6-e0bc-434d-aff8-db1316887d4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.437040] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 611.437040] env[62619]: value = "task-1364804" [ 611.437040] env[62619]: _type = "Task" [ 611.437040] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.442968] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364804, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 611.682344] env[62619]: DEBUG nova.scheduler.client.report [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 611.809149] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 611.943899] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364804, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.025029} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 611.944210] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 611.944477] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Moving file from [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8/a3c7be48-0721-419b-bbd6-8b4cc36c5604 to [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604. {{(pid=62619) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:334}} [ 611.944869] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MoveDatastoreFile_Task with opID=oslo.vmware-4c87c067-b5ff-4de3-bb0a-f12449377198 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.952747] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 611.952747] env[62619]: value = "task-1364805" [ 611.952747] env[62619]: _type = "Task" [ 611.952747] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.964247] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364805, 'name': MoveDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.168295] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 612.190935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.054s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 612.191696] env[62619]: ERROR nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Traceback (most recent call last): [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self.driver.spawn(context, instance, image_meta, [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] vm_ref = self.build_virtual_machine(instance, [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.191696] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] for vif in network_info: [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return self._sync_wrapper(fn, *args, **kwargs) [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self.wait() [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 612.192693] env[62619]: ERROR nova.compute.manager 
[instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self[:] = self._gt.wait() [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return self._exit_event.wait() [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] result = hub.switch() [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.192693] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return self.greenlet.switch() [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] result = function(*args, **kwargs) [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] return func(*args, **kwargs) [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] raise e [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] nwinfo = self.network_api.allocate_for_instance( [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] created_port_ids = self._update_ports_for_instance( [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] with excutils.save_and_reraise_exception(): [ 612.192999] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] self.force_reraise() [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] raise self.value [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] updated_port = self._update_port( [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] _ensure_no_port_binding_failure(port) [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] raise exception.PortBindingFailed(port_id=port['id']) [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] nova.exception.PortBindingFailed: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. [ 612.193365] env[62619]: ERROR nova.compute.manager [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] [ 612.193598] env[62619]: DEBUG nova.compute.utils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 612.195402] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.598s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 612.196810] env[62619]: INFO nova.compute.claims [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.200822] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Build of instance 89638fb3-c5f4-48a4-948b-fb6220ed1dca was re-scheduled: Binding failed for port 73faf8f2-a3d7-42da-85b0-667f0caef6f0, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 612.200971] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 612.204026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquiring lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.204026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Acquired lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.204026] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 612.212265] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.212530] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.212721] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.212864] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:388}} [ 612.213018] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.213169] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.213370] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.213523] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 612.213684] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.213944] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.214106] env[62619]: DEBUG nova.virt.hardware [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.214975] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37e30725-4f86-4883-90b4-2c6d51bc9687 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.226057] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-048232ed-a032-47a8-9b6b-37dff237063f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.347394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 612.463495] env[62619]: DEBUG oslo_vmware.api [None 
req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364805, 'name': MoveDatastoreFile_Task, 'duration_secs': 0.023611} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.465098] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] File moved {{(pid=62619) file_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:346}} [ 612.465098] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Cleaning up location [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 612.465098] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleting the datastore file [datastore1] vmware_temp/21714998-44be-4986-86e7-9b107f65c0f8 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 612.465098] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-07e9ae48-8253-4ad3-9cc0-ae129d09ab25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.472728] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 612.472728] env[62619]: value = "task-1364807" [ 612.472728] env[62619]: _type = "Task" [ 612.472728] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.481729] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364807, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.551168] env[62619]: DEBUG nova.compute.manager [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Received event network-changed-539a2943-0cc1-46ab-b932-112b4866f823 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 612.551168] env[62619]: DEBUG nova.compute.manager [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Refreshing instance network info cache due to event network-changed-539a2943-0cc1-46ab-b932-112b4866f823. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 612.551168] env[62619]: DEBUG oslo_concurrency.lockutils [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] Acquiring lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.551168] env[62619]: DEBUG oslo_concurrency.lockutils [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] Acquired lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 612.551168] env[62619]: DEBUG nova.network.neutron [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Refreshing network info cache for port 539a2943-0cc1-46ab-b932-112b4866f823 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 612.731329] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 612.782243] env[62619]: ERROR nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. 
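
The PortBindingFailed errors that follow come from Nova's Neutron integration: after asking Neutron to bind port 539a2943-0cc1-46ab-b932-112b4866f823 on this host, _ensure_no_port_binding_failure (nova/network/neutron.py:294 in the traceback below) inspects the port Neutron returned and raises nova.exception.PortBindingFailed, aborting the spawn. A minimal sketch of that check follows; the 'binding:vif_type' test and the exception body are assumptions for illustration, not the Nova source.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for "
            "more information." % port_id)


def _ensure_no_port_binding_failure(port):
    # Neutron reports a failed binding on the port itself; the exact field
    # checked here ('binding:vif_type' == 'binding_failed') is an assumption.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example: the port from the records above, as Neutron might return it
# after a failed binding attempt on this compute host.
try:
    _ensure_no_port_binding_failure(
        {'id': '539a2943-0cc1-46ab-b932-112b4866f823',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)
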
[ 612.782243] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.782243] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.782243] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.782243] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.782243] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.782243] env[62619]: ERROR nova.compute.manager raise self.value [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.782243] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 612.782243] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.782243] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 612.782592] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.782592] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 612.782592] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. 
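
The traceback above also shows the oslo_utils.excutils.save_and_reraise_exception() idiom used in _update_ports_for_instance: cleanup runs inside the context manager, and the original exception is re-raised by force_reraise() on exit, which is why PortBindingFailed propagates intact to _allocate_network_async. A minimal sketch of the pattern, with hypothetical update_port and rollback callables standing in for the real Neutron calls:

from oslo_utils import excutils


def update_ports_for_instance(port_ids, update_port, rollback):
    # update_port and rollback are placeholder callables standing in for
    # Neutron port updates and the cleanup Nova performs on failure.
    updated = []
    for port_id in port_ids:
        try:
            updated.append(update_port(port_id))
        except Exception:
            # Cleanup runs here; on leaving the context manager the original
            # exception (e.g. PortBindingFailed) is re-raised unchanged,
            # which is the force_reraise() frame visible in the traceback.
            with excutils.save_and_reraise_exception():
                rollback(updated)
    return updated
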
[ 612.782592] env[62619]: ERROR nova.compute.manager [ 612.782592] env[62619]: Traceback (most recent call last): [ 612.782592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 612.782592] env[62619]: listener.cb(fileno) [ 612.782592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.782592] env[62619]: result = function(*args, **kwargs) [ 612.782592] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.782592] env[62619]: return func(*args, **kwargs) [ 612.782592] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.782592] env[62619]: raise e [ 612.782592] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.782592] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 612.782592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.782592] env[62619]: created_port_ids = self._update_ports_for_instance( [ 612.782592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.782592] env[62619]: with excutils.save_and_reraise_exception(): [ 612.782592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.782592] env[62619]: self.force_reraise() [ 612.782592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.782592] env[62619]: raise self.value [ 612.782592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.782592] env[62619]: updated_port = self._update_port( [ 612.782592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.782592] env[62619]: _ensure_no_port_binding_failure(port) [ 612.782592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.782592] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 612.783180] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. [ 612.783180] env[62619]: Removing descriptor: 17 [ 612.783180] env[62619]: ERROR nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. 
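
Throughout this section the "Waiting for the task", "progress is N%" and "completed successfully" records come from oslo_vmware.api.VMwareAPISession.wait_for_task polling vCenter tasks (SearchDatastore_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task, MoveDatastoreFile_Task). The sketch below only illustrates the polling shape; get_task_info is a hypothetical callable standing in for the property reads the library actually performs against the Task object, and the real implementation adds timeouts and richer error handling.

import time


def wait_for_task(get_task_info, interval=0.5):
    """Poll a vCenter task until it finishes.

    get_task_info is a hypothetical callable returning an object with
    .state ('running', 'success' or 'error'), .progress, .result and
    .error attributes.
    """
    while True:
        info = get_task_info()
        if info.state == 'success':
            return info.result            # "... completed successfully."
        if info.state == 'error':
            raise RuntimeError('task failed: %s' % info.error)
        # While the task is still running, the library logs "progress is N%".
        print('progress is %s%%' % (info.progress or 0))
        time.sleep(interval)
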
[ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Traceback (most recent call last): [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] yield resources [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self.driver.spawn(context, instance, image_meta, [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 612.783180] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] vm_ref = self.build_virtual_machine(instance, [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] vif_infos = vmwarevif.get_vif_info(self._session, [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] for vif in network_info: [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return self._sync_wrapper(fn, *args, **kwargs) [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self.wait() [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self[:] = self._gt.wait() [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return self._exit_event.wait() [ 612.783463] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 612.783761] env[62619]: ERROR 
nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] result = hub.switch() [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return self.greenlet.switch() [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] result = function(*args, **kwargs) [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return func(*args, **kwargs) [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] raise e [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] nwinfo = self.network_api.allocate_for_instance( [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 612.783761] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] created_port_ids = self._update_ports_for_instance( [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] with excutils.save_and_reraise_exception(): [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self.force_reraise() [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] raise self.value [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] updated_port = self._update_port( [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 612.784034] 
env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] _ensure_no_port_binding_failure(port) [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 612.784034] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] raise exception.PortBindingFailed(port_id=port['id']) [ 612.784293] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. [ 612.784293] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] [ 612.784293] env[62619]: INFO nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Terminating instance [ 612.785938] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 612.821017] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 612.983397] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364807, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.05936} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.983789] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 612.984567] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9034ed75-382e-48f3-ab26-d4e83c6e03fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.992045] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 612.992045] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d80118-1ee4-9b11-2a3b-a2c4c5813fa6" [ 612.992045] env[62619]: _type = "Task" [ 612.992045] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.004941] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d80118-1ee4-9b11-2a3b-a2c4c5813fa6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.074286] env[62619]: DEBUG nova.network.neutron [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.163333] env[62619]: DEBUG nova.network.neutron [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 613.328607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Releasing lock "refresh_cache-89638fb3-c5f4-48a4-948b-fb6220ed1dca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.329211] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 613.329211] env[62619]: DEBUG nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 613.329650] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 613.504246] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d80118-1ee4-9b11-2a3b-a2c4c5813fa6, 'name': SearchDatastore_Task, 'duration_secs': 0.015554} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 613.507627] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 613.508941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.509263] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 613.509774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.509855] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.510120] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-680f33f7-f3b7-4588-9f97-d88b3cbb2ab6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.512121] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-21bc7fa9-5a66-47ce-8aaa-5e5c58150345 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.527658] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 613.527658] env[62619]: value = "task-1364808" [ 613.527658] env[62619]: _type = "Task" [ 613.527658] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.533035] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.533286] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 613.536937] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-30e45038-0b36-43ac-8d25-7351fcca6514 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.548574] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364808, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.548574] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 613.548574] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f406c7-b9c4-1d30-06a9-2e0cb04f888f" [ 613.548574] env[62619]: _type = "Task" [ 613.548574] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.557944] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f406c7-b9c4-1d30-06a9-2e0cb04f888f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.667116] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80d4a825-cdad-4b5f-89a1-8250d8ca0f6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.671373] env[62619]: DEBUG oslo_concurrency.lockutils [req-e94ccd65-8bfc-4f51-9181-149cdbe5ae71 req-280173b8-481c-444f-abf5-1098d766766c service nova] Releasing lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 613.672194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquired lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 613.672391] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 613.680604] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4ff5edcc-77d3-4d81-829e-ca3043ad5774 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.715597] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f326990-d326-409f-b990-fbed833acfe9 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.729179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75df8f85-c1b5-4542-b34e-9389ef6d4142 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.744713] env[62619]: DEBUG nova.compute.provider_tree [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 614.020259] env[62619]: DEBUG nova.network.neutron [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.046057] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364808, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.070524] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f406c7-b9c4-1d30-06a9-2e0cb04f888f, 'name': SearchDatastore_Task, 'duration_secs': 0.025583} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.072738] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94143094-21cd-45c6-bf6a-341ad246d5af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.080031] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 614.080031] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a483d5-e819-c158-0114-d4342fe1f8bc" [ 614.080031] env[62619]: _type = "Task" [ 614.080031] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.092404] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a483d5-e819-c158-0114-d4342fe1f8bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.198600] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 614.248499] env[62619]: DEBUG nova.scheduler.client.report [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 614.273038] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 614.523933] env[62619]: INFO nova.compute.manager [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] [instance: 89638fb3-c5f4-48a4-948b-fb6220ed1dca] Took 1.19 seconds to deallocate network for instance. [ 614.549612] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364808, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.610502} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.551575] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 614.552096] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 614.556342] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a38a45ec-0ef3-4456-9c7d-3214d50c46f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.566707] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 614.566707] env[62619]: value = "task-1364810" [ 614.566707] env[62619]: _type = "Task" [ 614.566707] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.579024] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364810, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.580681] env[62619]: DEBUG nova.compute.manager [req-1da22639-4bee-42db-865b-50a1745ec6e2 req-2cce7f78-400d-4d5e-ab78-aa4ee7bd6611 service nova] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Received event network-vif-deleted-539a2943-0cc1-46ab-b932-112b4866f823 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 614.596900] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a483d5-e819-c158-0114-d4342fe1f8bc, 'name': SearchDatastore_Task, 'duration_secs': 0.042995} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 614.597443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.598229] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 614.598626] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4434a53f-fc15-402b-9df8-1ae88ad76895 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.607323] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 614.607323] env[62619]: value = "task-1364811" [ 614.607323] env[62619]: _type = "Task" [ 614.607323] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 614.616783] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364811, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 614.761129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.564s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 614.761129] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 614.763259] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.059s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 614.764690] env[62619]: INFO nova.compute.claims [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 614.775288] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Releasing lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 614.775784] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 614.776025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 614.776335] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9c1a65d0-1a44-4645-a0f9-5a027ad69f53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.786159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7dab3df5-80e9-461c-bf47-a57ba417e340 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 614.813127] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7f17ce32-e674-4275-a313-a3f69dde2ee9 could not be found. [ 614.813389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 614.813571] env[62619]: INFO nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 614.813820] env[62619]: DEBUG oslo.service.loopingcall [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 614.817833] env[62619]: DEBUG nova.compute.manager [-] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 614.817833] env[62619]: DEBUG nova.network.neutron [-] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 614.832766] env[62619]: DEBUG nova.network.neutron [-] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 615.076997] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364810, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067594} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.077544] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 615.078568] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0f454d-3265-482e-80bb-100dc00a06cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.103088] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Reconfiguring VM instance instance-00000011 to attach disk [datastore1] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 615.103409] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-70789636-c328-4f0f-aafa-b9c99e952860 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.128749] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364811, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.129646] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 615.129646] env[62619]: value = "task-1364812" [ 615.129646] env[62619]: _type = "Task" [ 615.129646] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.138281] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364812, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.271275] env[62619]: DEBUG nova.compute.utils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 615.274171] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 615.274487] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 615.336595] env[62619]: DEBUG nova.network.neutron [-] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 615.363846] env[62619]: DEBUG nova.policy [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '583073a356104830abcbb90490a0d04b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3b993e6d0edb43f1b2e3afd72a08a529', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 615.578034] env[62619]: INFO nova.scheduler.client.report [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Deleted allocations for instance 89638fb3-c5f4-48a4-948b-fb6220ed1dca [ 615.633169] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364811, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.646060] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364812, 'name': ReconfigVM_Task, 'duration_secs': 0.450754} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 615.646801] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Reconfigured VM instance instance-00000011 to attach disk [datastore1] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 615.648280] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3b074bc1-ac2b-4e44-9f9e-fe07e40153f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 615.658125] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 615.658125] env[62619]: value = "task-1364813" [ 615.658125] env[62619]: _type = "Task" [ 615.658125] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 615.670057] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364813, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 615.775748] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 615.840087] env[62619]: INFO nova.compute.manager [-] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Took 1.02 seconds to deallocate network for instance. 
[ 615.845018] env[62619]: DEBUG nova.compute.claims [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 615.845018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 615.884438] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Successfully created port: cfc22c12-4455-44de-8005-6c8a50fac191 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 616.093500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-00715081-bb39-429b-9569-55bdf122df93 tempest-ServerDiagnosticsTest-813267142 tempest-ServerDiagnosticsTest-813267142-project-member] Lock "89638fb3-c5f4-48a4-948b-fb6220ed1dca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 60.626s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 616.137131] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364811, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.148208} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.137361] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 616.137821] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 616.137876] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-aa557c6d-cc5c-4d42-ae96-a04307b4a9a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.150325] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 616.150325] env[62619]: value = "task-1364814" [ 616.150325] env[62619]: _type = "Task" [ 616.150325] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.158862] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364814, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.172975] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364813, 'name': Rename_Task, 'duration_secs': 0.177544} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.172975] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 616.173053] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-e5c1e00b-5fa3-47a9-b8a6-63ceb339a621 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.185312] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 616.185312] env[62619]: value = "task-1364815" [ 616.185312] env[62619]: _type = "Task" [ 616.185312] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.194216] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364815, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.200546] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c628b27-aa5f-4da7-a854-4576b6f73ed6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.214889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-260ebc03-302f-4aa5-9778-a417406c26d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.250096] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4978025b-70e0-4b0a-90a4-51646060c039 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.258876] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-816cc202-fedf-4386-903b-922ca70c31f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.276123] env[62619]: DEBUG nova.compute.provider_tree [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 616.597280] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 616.664263] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364814, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072516} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.664263] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 616.664724] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42d5363-75a6-42cc-b661-468dd25db8a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.685762] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Reconfiguring VM instance instance-0000000e to attach disk [datastore1] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 616.686540] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ad0e2032-467f-4da5-9d25-b176ff1865de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.713422] env[62619]: DEBUG oslo_vmware.api [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364815, 'name': PowerOnVM_Task, 'duration_secs': 0.484201} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 616.718143] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 616.718143] env[62619]: INFO nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Took 9.16 seconds to spawn the instance on the hypervisor. [ 616.718143] env[62619]: DEBUG nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 616.718143] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 616.718143] env[62619]: value = "task-1364817" [ 616.718143] env[62619]: _type = "Task" [ 616.718143] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 616.718143] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30d3e651-7707-42d0-8254-f8b2d1970202 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.733248] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364817, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 616.780346] env[62619]: DEBUG nova.scheduler.client.report [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 616.791263] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 616.826419] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 616.827302] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 616.827381] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 616.827581] 
env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 616.827748] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 616.827917] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 616.828147] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 616.828332] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 616.832023] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 616.832023] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 616.832023] env[62619]: DEBUG nova.virt.hardware [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 616.832023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6081894-d263-44f1-9ebc-0ad59db1e704 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 616.840965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b8a8f84-9e89-44b6-82b9-17d81e5c7c01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.119592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.239817] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364817, 'name': ReconfigVM_Task, 'duration_secs': 0.294811} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.241868] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Reconfigured VM instance instance-0000000e to attach disk [datastore1] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 617.242760] env[62619]: INFO nova.compute.manager [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Took 29.63 seconds to build instance. [ 617.243772] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce3c0703-2c2d-43c9-9f59-249eb87d06d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.254117] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 617.254117] env[62619]: value = "task-1364818" [ 617.254117] env[62619]: _type = "Task" [ 617.254117] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.271743] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364818, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.287117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.522s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.287117] env[62619]: DEBUG nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 617.290325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.695s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.293160] env[62619]: INFO nova.compute.claims [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.316408] env[62619]: ERROR nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. [ 617.316408] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.316408] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.316408] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.316408] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.316408] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.316408] env[62619]: ERROR nova.compute.manager raise self.value [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.316408] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 617.316408] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.316408] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 617.317935] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.317935] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 617.317935] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. 
[ 617.317935] env[62619]: ERROR nova.compute.manager [ 617.317935] env[62619]: Traceback (most recent call last): [ 617.317935] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 617.317935] env[62619]: listener.cb(fileno) [ 617.317935] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.317935] env[62619]: result = function(*args, **kwargs) [ 617.317935] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.317935] env[62619]: return func(*args, **kwargs) [ 617.317935] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.317935] env[62619]: raise e [ 617.317935] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.317935] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 617.317935] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.317935] env[62619]: created_port_ids = self._update_ports_for_instance( [ 617.317935] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.317935] env[62619]: with excutils.save_and_reraise_exception(): [ 617.317935] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.317935] env[62619]: self.force_reraise() [ 617.317935] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.317935] env[62619]: raise self.value [ 617.317935] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.317935] env[62619]: updated_port = self._update_port( [ 617.317935] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.317935] env[62619]: _ensure_no_port_binding_failure(port) [ 617.317935] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.317935] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 617.318728] env[62619]: nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. [ 617.318728] env[62619]: Removing descriptor: 18 [ 617.318728] env[62619]: ERROR nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. 
[ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Traceback (most recent call last): [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] yield resources [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self.driver.spawn(context, instance, image_meta, [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self._vmops.spawn(context, instance, image_meta, injected_files, [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 617.318728] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] vm_ref = self.build_virtual_machine(instance, [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] vif_infos = vmwarevif.get_vif_info(self._session, [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] for vif in network_info: [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return self._sync_wrapper(fn, *args, **kwargs) [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self.wait() [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self[:] = self._gt.wait() [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return self._exit_event.wait() [ 617.319011] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 617.319302] env[62619]: ERROR 
nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] result = hub.switch() [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return self.greenlet.switch() [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] result = function(*args, **kwargs) [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return func(*args, **kwargs) [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] raise e [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] nwinfo = self.network_api.allocate_for_instance( [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 617.319302] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] created_port_ids = self._update_ports_for_instance( [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] with excutils.save_and_reraise_exception(): [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self.force_reraise() [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] raise self.value [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] updated_port = self._update_port( [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 617.319585] 
env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] _ensure_no_port_binding_failure(port) [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 617.319585] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] raise exception.PortBindingFailed(port_id=port['id']) [ 617.320501] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. [ 617.320501] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] [ 617.320501] env[62619]: INFO nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Terminating instance [ 617.325428] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquiring lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.326208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquired lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 617.326208] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 617.451639] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "25df5673-9633-40de-8e72-a8620f19a6f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.451890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "25df5673-9633-40de-8e72-a8620f19a6f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 617.516921] env[62619]: DEBUG nova.compute.manager [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Received event network-changed-cfc22c12-4455-44de-8005-6c8a50fac191 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 617.516921] env[62619]: DEBUG 
nova.compute.manager [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Refreshing instance network info cache due to event network-changed-cfc22c12-4455-44de-8005-6c8a50fac191. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 617.517382] env[62619]: DEBUG oslo_concurrency.lockutils [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] Acquiring lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 617.749033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-430530dd-cb32-4a31-856f-2dd4424d057f tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 52.102s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 617.769530] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364818, 'name': Rename_Task, 'duration_secs': 0.146553} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.769530] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 617.769530] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7d657016-c102-4c1e-ba81-4889d5848155 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.791510] env[62619]: DEBUG nova.compute.utils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.792865] env[62619]: DEBUG nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 617.801989] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 617.801989] env[62619]: value = "task-1364819" [ 617.801989] env[62619]: _type = "Task" [ 617.801989] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.811925] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364819, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.860213] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 617.922227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "d92cd356-0e29-429d-9216-b376e91e0fe8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 617.922507] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "d92cd356-0e29-429d-9216-b376e91e0fe8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.022278] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.060132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquiring lock "ffae1b5d-83fc-4007-be0f-c6f1e285f824" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.060450] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Lock "ffae1b5d-83fc-4007-be0f-c6f1e285f824" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 618.254150] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 618.295701] env[62619]: DEBUG nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 618.321747] env[62619]: DEBUG oslo_vmware.api [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364819, 'name': PowerOnVM_Task, 'duration_secs': 0.531124} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.321827] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 618.322042] env[62619]: DEBUG nova.compute.manager [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 618.322817] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e5dd17-68e9-4ce4-bf06-8736b8e8fc0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.401028] env[62619]: INFO nova.compute.manager [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Rebuilding instance [ 618.457914] env[62619]: DEBUG nova.compute.manager [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 618.458967] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a98fcc-efd0-46fa-a027-f52a12286c02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.525355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Releasing lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 618.525833] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 618.526708] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 618.526708] env[62619]: DEBUG oslo_concurrency.lockutils [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] Acquired lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 618.526802] env[62619]: DEBUG nova.network.neutron [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Refreshing network info cache for port cfc22c12-4455-44de-8005-6c8a50fac191 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 618.528053] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4e75c02-efe2-433a-8724-90a69ab3963f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.538258] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd5db4b9-8492-4bce-ad7a-8b2c6ce6865b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.567764] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b79f2461-2b0f-4427-abb8-7a3a192e6230 could not be found. [ 618.567994] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 618.568233] env[62619]: INFO nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Took 0.04 seconds to destroy the instance on the hypervisor. [ 618.568434] env[62619]: DEBUG oslo.service.loopingcall [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.571563] env[62619]: DEBUG nova.compute.manager [-] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 618.571723] env[62619]: DEBUG nova.network.neutron [-] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 618.592451] env[62619]: DEBUG nova.network.neutron [-] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.778346] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81efe903-31cd-4bdf-a9ee-3e448db5d4ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.791278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.797244] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f76bfb7e-7d7e-47be-b9ad-4e6bc3576383 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.836858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-852483d1-375a-4380-9e16-d6d1b1590552 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.854113] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84ca1c5a-2fa2-4dd4-a1cc-42cef575c888 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.859187] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 618.871288] env[62619]: DEBUG nova.compute.provider_tree [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 618.975609] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 618.975986] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with 
opID=oslo.vmware-e9668343-24f6-4851-987a-1f4f2277fd98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.989661] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 618.989661] env[62619]: value = "task-1364821" [ 618.989661] env[62619]: _type = "Task" [ 618.989661] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.000628] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364821, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.056118] env[62619]: DEBUG nova.network.neutron [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 619.096313] env[62619]: DEBUG nova.network.neutron [-] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.152864] env[62619]: DEBUG nova.network.neutron [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.341511] env[62619]: DEBUG nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 619.375043] env[62619]: DEBUG nova.scheduler.client.report [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 619.378237] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "9735d6d1-eb10-46b4-a273-10b1351033f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.378452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "9735d6d1-eb10-46b4-a273-10b1351033f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.380770] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 619.380988] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 619.381153] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.381332] env[62619]: DEBUG nova.virt.hardware [None 
req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 619.381469] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.381609] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 619.381807] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 619.381960] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 619.382155] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 619.382318] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 619.382485] env[62619]: DEBUG nova.virt.hardware [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 619.383781] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ffbc756-2ca0-460f-8b6d-ab65c650307c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.393873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e63c562-8269-4b30-9b86-14b811280e42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.416112] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} 
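Most vCenter interactions in this section follow the same pattern: a *_Task method is invoked (ReconfigVM_Task, Rename_Task, PowerOnVM_Task, PowerOffVM_Task, CreateVM_Task), then the task is polled until it finishes, which shows up in the log as "progress is N%" lines followed by "completed successfully" with a duration_secs. The loop below is a generic sketch of that wait-for-task pattern; TaskInfo and fetch_task_info are invented for the example and do not mirror the oslo.vmware API.

import time
from dataclasses import dataclass
from typing import Callable


@dataclass
class TaskInfo:
    """Illustrative stand-in for a vSphere task's current state."""
    state: str            # "running", "success" or "error"
    progress: int = 0     # 0-100
    error: str | None = None


def wait_for_task(fetch_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5,
                  timeout: float = 300.0) -> TaskInfo:
    """Poll a long-running task until it succeeds, fails or times out."""
    deadline = time.monotonic() + timeout
    while True:
        info = fetch_task_info()
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"task failed: {info.error}")
        if time.monotonic() > deadline:
            raise TimeoutError("task did not complete in time")
        time.sleep(poll_interval)


# Example: a fake task that finishes on the third poll.
_polls = iter([TaskInfo("running", 0), TaskInfo("running", 60), TaskInfo("success", 100)])
print(wait_for_task(lambda: next(_polls), poll_interval=0.01))
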
[ 619.425024] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Creating folder: Project (4363eed7b3124da799692e2ac131064a). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.425024] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e66aea24-0364-4617-a3b7-fe630c6f8479 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.435801] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Created folder: Project (4363eed7b3124da799692e2ac131064a) in parent group-v290436. [ 619.436057] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Creating folder: Instances. Parent ref: group-v290448. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 619.436333] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f97e50eb-fbe7-4098-972e-6b2687bd0886 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.447585] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Created folder: Instances in parent group-v290448. [ 619.447892] env[62619]: DEBUG oslo.service.loopingcall [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.448524] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 619.448851] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee2404a5-62e1-43f3-94a0-d8badfdbf800 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.471166] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.471166] env[62619]: value = "task-1364824" [ 619.471166] env[62619]: _type = "Task" [ 619.471166] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.478953] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364824, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.499756] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364821, 'name': PowerOffVM_Task, 'duration_secs': 0.223917} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.500042] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 619.500262] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 619.501023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04ce5a9-fee8-40d5-9dbf-b383652f319c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.509791] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 619.510252] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-92a3116d-f891-4fdb-9c21-0dc87a2d6179 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.542119] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 619.542451] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 619.542590] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleting the datastore file [datastore1] b7c425a1-a80d-4a62-a71f-d14fdf638cf7 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 619.542890] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b046935-4c62-4afb-9ac9-969b50dccf2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.552162] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 619.552162] env[62619]: value = "task-1364826" [ 619.552162] env[62619]: _type = "Task" [ 619.552162] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.562483] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364826, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.600527] env[62619]: INFO nova.compute.manager [-] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Took 1.03 seconds to deallocate network for instance. [ 619.603698] env[62619]: DEBUG nova.compute.claims [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 619.603984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 619.624390] env[62619]: DEBUG nova.compute.manager [req-1fa05ebb-0f2b-4c08-96aa-fc28d68935a4 req-24cede30-42a1-48a9-9bc3-58fe73467293 service nova] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Received event network-vif-deleted-cfc22c12-4455-44de-8005-6c8a50fac191 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 619.655903] env[62619]: DEBUG oslo_concurrency.lockutils [req-6f9142e3-ac59-4211-bf9a-d997c231f557 req-807ca2d1-1df4-4f36-8fcb-f056b0284767 service nova] Releasing lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 619.736437] env[62619]: INFO nova.compute.manager [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Rebuilding instance [ 619.792176] env[62619]: DEBUG nova.compute.manager [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 619.792883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3fdfb15-ce5c-418a-94f5-b69f05720b5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.887023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.594s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 619.887023] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 
tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 619.888184] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.543s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 619.982104] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364824, 'name': CreateVM_Task, 'duration_secs': 0.342903} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.982104] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 619.982491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 619.982756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 619.983546] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 619.983898] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3f2e95d3-444f-466b-8abd-fe7447d4ab76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.991331] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 619.991331] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5276815c-6115-fcdf-c42b-803f33b1aed6" [ 619.991331] env[62619]: _type = "Task" [ 619.991331] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.001727] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5276815c-6115-fcdf-c42b-803f33b1aed6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.063478] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364826, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.117039} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.063907] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 620.064233] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 620.064624] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 620.307779] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 620.307779] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cf08bfec-454f-471f-aafc-c77dbe1c9508 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.315767] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 620.315767] env[62619]: value = "task-1364827" [ 620.315767] env[62619]: _type = "Task" [ 620.315767] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.326300] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364827, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.396787] env[62619]: DEBUG nova.compute.utils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 620.398795] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 620.399231] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 620.468225] env[62619]: DEBUG nova.policy [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a17303d7d22a4b38960a424118331c3c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ef99584923c404bbf55e815f2792393', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 620.502277] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5276815c-6115-fcdf-c42b-803f33b1aed6, 'name': SearchDatastore_Task, 'duration_secs': 0.01743} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.504637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 620.504637] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.504637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 620.504637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 620.505150] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 620.505150] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49fab67c-c453-4acc-885e-9866071fcc28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.515451] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 620.515653] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 620.516419] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-472a388f-b119-4b9e-bf7f-de31842c685c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.524975] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 620.524975] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5221ec19-29cb-f5ee-e1c3-5c9a673a89bf" [ 620.524975] env[62619]: _type = "Task" [ 620.524975] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.536749] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5221ec19-29cb-f5ee-e1c3-5c9a673a89bf, 'name': SearchDatastore_Task, 'duration_secs': 0.008928} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.540078] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-84a4b606-08d8-46c1-8d5b-e2b57ed0b63a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.546640] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 620.546640] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fd0229-1430-0d87-bfb6-5d84f4c8a722" [ 620.546640] env[62619]: _type = "Task" [ 620.546640] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.557649] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fd0229-1430-0d87-bfb6-5d84f4c8a722, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.827784] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364827, 'name': PowerOffVM_Task, 'duration_secs': 0.156363} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 620.828106] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 620.828303] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 620.829131] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bd142ad-c5df-490a-ad93-fb204123350d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.837127] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 620.837400] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc12a07e-e16f-4f5c-bb0c-487c8513b590 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.842691] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c470f82-51e2-4425-9b61-16c840cf0198 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.850250] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d745f29f-1a52-42aa-b7b4-d1986b086216 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.888922] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a2f14ec-2e22-461e-af5f-f274a76f7dd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.891952] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 620.891952] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 620.891952] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Deleting the datastore file [datastore1] 79a94ed1-1139-4194-8091-00b7b1562330 {{(pid=62619) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 620.892234] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eed9f7e7-086f-4bf6-81a6-4074b6cecab8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.902399] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27d471e7-cbea-4ca4-b6b3-c6983aa7db4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 620.907900] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 620.907900] env[62619]: value = "task-1364829" [ 620.907900] env[62619]: _type = "Task" [ 620.907900] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 620.908434] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 620.923810] env[62619]: DEBUG nova.compute.provider_tree [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 620.931284] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364829, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.960151] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Successfully created port: 38a8e5b2-bc03-4720-9b2d-6905bfedbca5 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 621.058342] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fd0229-1430-0d87-bfb6-5d84f4c8a722, 'name': SearchDatastore_Task, 'duration_secs': 0.01105} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.058698] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 621.058971] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] d7404720-7cf0-41bf-a882-2cb6db2253bc/d7404720-7cf0-41bf-a882-2cb6db2253bc.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 621.059351] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-862ea6c0-366b-49df-a0a9-f4105098bc1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.068101] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 621.068101] env[62619]: value = "task-1364830" [ 621.068101] env[62619]: _type = "Task" [ 621.068101] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.081319] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364830, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.114615] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 621.114931] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 621.115133] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.115371] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.115584] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.116098] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.116471] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.116759] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.116952] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 
tempest-ServerShowV254Test-44959365-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.117162] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.117352] env[62619]: DEBUG nova.virt.hardware [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.118407] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00765335-8017-443a-8192-3a07c7e91223 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.126844] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e6e7399-70b2-4b9f-b1b1-50ca1d8e52f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.143464] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 621.149472] env[62619]: DEBUG oslo.service.loopingcall [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 621.150095] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 621.150330] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b52b40a1-8b4f-4feb-85f5-ce86e288d8bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.170555] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 621.170555] env[62619]: value = "task-1364831" [ 621.170555] env[62619]: _type = "Task" [ 621.170555] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.179015] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364831, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.434025] env[62619]: DEBUG nova.scheduler.client.report [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 621.452360] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364829, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.099532} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.452360] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 621.452360] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 621.452360] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 621.579908] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364830, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.500242} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 621.581197] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] d7404720-7cf0-41bf-a882-2cb6db2253bc/d7404720-7cf0-41bf-a882-2cb6db2253bc.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 621.581197] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 621.581197] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-34c68da5-5c09-4241-aec0-1285e13dbfd6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 621.589571] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 621.589571] env[62619]: value = "task-1364832" [ 621.589571] env[62619]: _type = "Task" [ 621.589571] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 621.603020] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364832, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.682702] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364831, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 621.938923] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 621.941485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.053s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 621.942200] env[62619]: ERROR nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Traceback (most recent call last): [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self.driver.spawn(context, instance, image_meta, [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] vm_ref = self.build_virtual_machine(instance, [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 621.942200] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] for vif in network_info: [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return self._sync_wrapper(fn, *args, **kwargs) [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self.wait() [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 621.942674] 
env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self[:] = self._gt.wait() [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return self._exit_event.wait() [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] result = hub.switch() [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 621.942674] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return self.greenlet.switch() [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] result = function(*args, **kwargs) [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] return func(*args, **kwargs) [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] raise e [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] nwinfo = self.network_api.allocate_for_instance( [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] created_port_ids = self._update_ports_for_instance( [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] with excutils.save_and_reraise_exception(): [ 621.942990] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] self.force_reraise() [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] raise self.value [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] updated_port = self._update_port( [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] _ensure_no_port_binding_failure(port) [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] raise exception.PortBindingFailed(port_id=port['id']) [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] nova.exception.PortBindingFailed: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. [ 621.943326] env[62619]: ERROR nova.compute.manager [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] [ 621.943641] env[62619]: DEBUG nova.compute.utils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 621.944880] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.170s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 621.946429] env[62619]: INFO nova.compute.claims [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 621.949634] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Build of instance d22ac031-3202-457c-8bb7-d557ad0fe9f9 was re-scheduled: Binding failed for port 96cc2528-5a4f-4884-97dc-080a74195033, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 621.950300] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 621.950300] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquiring lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 621.950404] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Acquired lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 621.950564] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 621.987894] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 621.988180] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 621.988332] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 621.988506] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 
tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 621.988924] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 621.988924] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 621.989169] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 621.989404] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 621.989575] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 621.989745] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 621.991080] env[62619]: DEBUG nova.virt.hardware [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 621.991080] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-929f80d1-bf1e-4ac1-a8aa-29f9eebb4758 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.000572] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-286dc6b8-31dc-42c3-8a8d-930f3d0a9ed1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.010449] env[62619]: DEBUG nova.compute.manager [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Received event network-changed-38a8e5b2-bc03-4720-9b2d-6905bfedbca5 {{(pid=62619) 
external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 622.010449] env[62619]: DEBUG nova.compute.manager [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Refreshing instance network info cache due to event network-changed-38a8e5b2-bc03-4720-9b2d-6905bfedbca5. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 622.010449] env[62619]: DEBUG oslo_concurrency.lockutils [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] Acquiring lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.010449] env[62619]: DEBUG oslo_concurrency.lockutils [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] Acquired lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.010449] env[62619]: DEBUG nova.network.neutron [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Refreshing network info cache for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 622.025648] env[62619]: ERROR nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. 
[ 622.025648] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.025648] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.025648] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.025648] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.025648] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.025648] env[62619]: ERROR nova.compute.manager raise self.value [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.025648] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 622.025648] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.025648] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 622.026155] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.026155] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 622.026155] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. 
[ 622.026155] env[62619]: ERROR nova.compute.manager [ 622.026155] env[62619]: Traceback (most recent call last): [ 622.026155] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 622.026155] env[62619]: listener.cb(fileno) [ 622.026155] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.026155] env[62619]: result = function(*args, **kwargs) [ 622.026155] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.026155] env[62619]: return func(*args, **kwargs) [ 622.026155] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.026155] env[62619]: raise e [ 622.026155] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.026155] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 622.026155] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.026155] env[62619]: created_port_ids = self._update_ports_for_instance( [ 622.026155] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.026155] env[62619]: with excutils.save_and_reraise_exception(): [ 622.026155] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.026155] env[62619]: self.force_reraise() [ 622.026155] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.026155] env[62619]: raise self.value [ 622.026155] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.026155] env[62619]: updated_port = self._update_port( [ 622.026155] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.026155] env[62619]: _ensure_no_port_binding_failure(port) [ 622.026155] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 622.026155] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 622.026828] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. [ 622.026828] env[62619]: Removing descriptor: 18 [ 622.028900] env[62619]: ERROR nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. 
[ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Traceback (most recent call last): [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] yield resources [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self.driver.spawn(context, instance, image_meta, [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self._vmops.spawn(context, instance, image_meta, injected_files, [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] vm_ref = self.build_virtual_machine(instance, [ 622.028900] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] vif_infos = vmwarevif.get_vif_info(self._session, [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] for vif in network_info: [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] return self._sync_wrapper(fn, *args, **kwargs) [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self.wait() [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self[:] = self._gt.wait() [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] return self._exit_event.wait() [ 622.030296] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 622.030296] env[62619]: ERROR 
nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] current.throw(*self._exc) [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] result = function(*args, **kwargs) [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] return func(*args, **kwargs) [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] raise e [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] nwinfo = self.network_api.allocate_for_instance( [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] created_port_ids = self._update_ports_for_instance( [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] with excutils.save_and_reraise_exception(): [ 622.030811] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self.force_reraise() [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] raise self.value [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] updated_port = self._update_port( [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] _ensure_no_port_binding_failure(port) [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] raise exception.PortBindingFailed(port_id=port['id']) [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. [ 622.031192] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] [ 622.031192] env[62619]: INFO nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Terminating instance [ 622.031534] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquiring lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.100021] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364832, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.106595} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.100458] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 622.101288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd661806-be1b-4b9d-b90e-addd8138c8b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.123716] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Reconfiguring VM instance instance-00000014 to attach disk [datastore2] d7404720-7cf0-41bf-a882-2cb6db2253bc/d7404720-7cf0-41bf-a882-2cb6db2253bc.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 622.124369] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-21562c60-297a-4602-810e-4274cc2b1637 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.145871] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 622.145871] env[62619]: value = "task-1364833" [ 622.145871] env[62619]: _type = "Task" [ 622.145871] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.156018] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364833, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.185130] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364831, 'name': CreateVM_Task, 'duration_secs': 0.611502} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.185130] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 622.185130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.185130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.185130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 622.185333] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7be60063-c4fc-40ac-912d-33d857b227c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.190838] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 622.190838] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d1df40-03e2-689c-7df6-50c2130366c1" [ 622.190838] env[62619]: _type = "Task" [ 622.190838] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.198905] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d1df40-03e2-689c-7df6-50c2130366c1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.472861] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.482839] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 622.483121] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 622.483466] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 622.483466] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 622.483605] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 622.483745] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 622.483944] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 622.484169] env[62619]: DEBUG 
nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 622.484288] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 622.484505] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 622.484606] env[62619]: DEBUG nova.virt.hardware [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 622.485740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f34431e-9c98-4451-9958-47f4b92cff6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.494369] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a361d8a-b477-44f2-b256-7947258cf54e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.508730] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 622.514430] env[62619]: DEBUG oslo.service.loopingcall [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 622.514590] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 622.518562] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4e0c3c5a-2850-4f5d-8afd-4753a5156900 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.538535] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 622.538535] env[62619]: value = "task-1364834" [ 622.538535] env[62619]: _type = "Task" [ 622.538535] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.546879] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364834, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.558310] env[62619]: DEBUG nova.network.neutron [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 622.565382] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.669451] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364833, 'name': ReconfigVM_Task, 'duration_secs': 0.282879} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.670408] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Reconfigured VM instance instance-00000014 to attach disk [datastore2] d7404720-7cf0-41bf-a882-2cb6db2253bc/d7404720-7cf0-41bf-a882-2cb6db2253bc.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 622.671080] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-80b0843e-85ac-4659-9ea7-ead159fde525 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.681043] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 622.681043] env[62619]: value = "task-1364835" [ 622.681043] env[62619]: _type = "Task" [ 622.681043] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.690660] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364835, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 622.702865] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d1df40-03e2-689c-7df6-50c2130366c1, 'name': SearchDatastore_Task, 'duration_secs': 0.009213} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 622.703984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 622.703984] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 622.703984] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 622.704249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 622.704450] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 622.704726] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8855f026-59a4-4874-9a93-48272a3b16b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.716290] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 622.716576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 622.717830] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-182c917f-e2af-4a74-8cd3-22f47afb85e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 622.726316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquiring lock "ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 622.726540] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Lock "ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 622.728844] env[62619]: DEBUG nova.network.neutron [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.732372] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 622.732372] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b68f38-a92d-f58c-d308-8f040125961a" [ 622.732372] env[62619]: _type = "Task" [ 622.732372] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 622.742986] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b68f38-a92d-f58c-d308-8f040125961a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.048578] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364834, 'name': CreateVM_Task, 'duration_secs': 0.340024} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.048849] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 623.049154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.049301] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.049614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 623.049881] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b51c6523-16d6-4b20-8c12-75fee7c0f950 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.054743] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 623.054743] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525a13f0-c355-3317-d88f-83f6aed32efc" [ 623.054743] env[62619]: _type = "Task" [ 623.054743] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.065109] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525a13f0-c355-3317-d88f-83f6aed32efc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.068200] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Releasing lock "refresh_cache-d22ac031-3202-457c-8bb7-d557ad0fe9f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.068414] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 623.068595] env[62619]: DEBUG nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 623.068788] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 623.084494] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.193540] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364835, 'name': Rename_Task, 'duration_secs': 0.181619} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.193820] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 623.194084] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a48d33a9-f780-4037-bab7-8bdd928b1562 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.201656] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 623.201656] env[62619]: value = "task-1364836" [ 623.201656] env[62619]: _type = "Task" [ 623.201656] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.212567] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364836, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.232169] env[62619]: DEBUG oslo_concurrency.lockutils [req-9dfc8b29-d076-4384-b771-a588cf782b15 req-2ef3de46-0269-4492-8e45-bae96b50742b service nova] Releasing lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.232386] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquired lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.232518] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.248986] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b68f38-a92d-f58c-d308-8f040125961a, 'name': SearchDatastore_Task, 'duration_secs': 0.018551} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.250416] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78ce5a92-3d2e-4536-b9bb-0f7730ce0ec8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.262680] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 623.262680] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a6bbff-6d1e-c33b-d753-b0b8f55e84cc" [ 623.262680] env[62619]: _type = "Task" [ 623.262680] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.281728] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a6bbff-6d1e-c33b-d753-b0b8f55e84cc, 'name': SearchDatastore_Task, 'duration_secs': 0.01555} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.281989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.282304] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 623.282599] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-818bdc3f-a517-46b1-8bd0-63fc005729b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.293924] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 623.293924] env[62619]: value = "task-1364837" [ 623.293924] env[62619]: _type = "Task" [ 623.293924] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.305214] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364837, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.355243] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54fe78d9-dc97-4247-8c1f-e9e157d0cf57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.363670] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb45a158-b152-4481-9e9e-d11d95d3eb14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.398157] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fcd588-0b88-4177-8722-a1a7fa07711b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.407203] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-172ae2e8-6322-456a-852b-6d0238073db9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.421947] env[62619]: DEBUG nova.compute.provider_tree [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.444746] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "73145811-c355-462e-9a8e-ffccf2efe683" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 623.444993] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "73145811-c355-462e-9a8e-ffccf2efe683" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 623.573023] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525a13f0-c355-3317-d88f-83f6aed32efc, 'name': SearchDatastore_Task, 'duration_secs': 0.010803} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 623.573023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 623.573023] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 623.573023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 623.573273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 623.573273] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 623.573273] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-315a779d-70d2-4f68-a7ac-58eeb6ee2019 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.589590] env[62619]: DEBUG nova.network.neutron [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.593438] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 623.593669] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 623.594734] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ae8ee0f-e680-4d4a-be2c-047077ce59e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.602226] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 623.602226] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c3d5e6-3c51-b3d5-f00a-1b90cf45383e" [ 623.602226] env[62619]: _type = "Task" [ 623.602226] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 623.614020] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c3d5e6-3c51-b3d5-f00a-1b90cf45383e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.716790] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364836, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.762313] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.807017] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364837, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 623.863688] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 623.924795] env[62619]: DEBUG nova.scheduler.client.report [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 624.030692] env[62619]: DEBUG nova.compute.manager [req-fcebf123-ab03-49f2-b989-96ab724bdddc req-fb48a12d-e915-4555-ab13-252de3509072 service nova] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Received event network-vif-deleted-38a8e5b2-bc03-4720-9b2d-6905bfedbca5 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 624.096315] env[62619]: INFO nova.compute.manager [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] [instance: d22ac031-3202-457c-8bb7-d557ad0fe9f9] Took 1.03 seconds to deallocate network for instance. [ 624.112873] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c3d5e6-3c51-b3d5-f00a-1b90cf45383e, 'name': SearchDatastore_Task, 'duration_secs': 0.060875} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.113702] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b6eaf69-a03f-430b-af0d-e52df4c8fc63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.118802] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 624.118802] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5275aae7-7dfb-1f0e-b7b6-18c69bafa965" [ 624.118802] env[62619]: _type = "Task" [ 624.118802] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.126342] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5275aae7-7dfb-1f0e-b7b6-18c69bafa965, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.214567] env[62619]: DEBUG oslo_vmware.api [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364836, 'name': PowerOnVM_Task, 'duration_secs': 0.787779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.214567] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 624.214567] env[62619]: INFO nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Took 4.87 seconds to spawn the instance on the hypervisor. [ 624.214567] env[62619]: DEBUG nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 624.215483] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27756d10-1ab8-4943-9194-38a723868bd7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.304657] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364837, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.577886} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.305030] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 624.305287] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 624.305596] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d8791e47-9cb4-446a-8e27-86375a4f45fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.313600] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 624.313600] env[62619]: value = "task-1364838" [ 624.313600] env[62619]: _type = "Task" [ 624.313600] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.322442] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364838, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.366660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Releasing lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.367127] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 624.367327] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 624.367636] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bfe1881e-13d8-4edb-99c0-9a6e3a610fd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.377993] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4c8b04-b9e9-4692-8871-6d1f8b9c8df1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.406234] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ee06b107-4352-4491-b9bb-7faa7ccb5571 could not be found. [ 624.406434] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 624.406613] env[62619]: INFO nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Took 0.04 seconds to destroy the instance on the hypervisor. [ 624.406858] env[62619]: DEBUG oslo.service.loopingcall [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.407111] env[62619]: DEBUG nova.compute.manager [-] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 624.407204] env[62619]: DEBUG nova.network.neutron [-] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 624.426310] env[62619]: DEBUG nova.network.neutron [-] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 624.429731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.485s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 624.430650] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.541s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 624.630218] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5275aae7-7dfb-1f0e-b7b6-18c69bafa965, 'name': SearchDatastore_Task, 'duration_secs': 0.032941} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.631264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 624.631447] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 624.631698] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d2b0d6a5-8c0d-4cf4-a75d-7893db5e79cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.639281] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 624.639281] env[62619]: value = "task-1364839" [ 624.639281] env[62619]: _type = "Task" [ 624.639281] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.647545] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364839, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.732325] env[62619]: INFO nova.compute.manager [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Took 24.05 seconds to build instance. [ 624.824021] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364838, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.086203} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 624.824293] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 624.825111] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38081ed7-7209-4577-b4e5-a586b48103c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.844807] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Reconfiguring VM instance instance-00000011 to attach disk [datastore2] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 624.844924] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ce74014d-0a30-4098-8be6-a2274a0c5537 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.864508] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 624.864508] env[62619]: value = "task-1364840" [ 624.864508] env[62619]: _type = "Task" [ 624.864508] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.872796] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364840, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.928791] env[62619]: DEBUG nova.network.neutron [-] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.934286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquiring lock "820e2ce3-a11a-4c69-947b-9de6dffff68e" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 624.934532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "820e2ce3-a11a-4c69-947b-9de6dffff68e" acquired by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 625.120290] env[62619]: INFO nova.scheduler.client.report [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Deleted allocations for instance d22ac031-3202-457c-8bb7-d557ad0fe9f9 [ 625.152399] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364839, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.234768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f95a4e27-1d36-4b63-a2fe-f08f1e4eaa87 tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "d7404720-7cf0-41bf-a882-2cb6db2253bc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.203s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.317213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-499dad95-a53e-486e-9058-8a8e253a2a5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.325776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3390aaf-d721-42fa-8fe1-9ae320fe78aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.363124] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8b73c0-4c5a-4133-8469-33628412c835 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.376273] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364840, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.379667] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890cb364-5229-4f93-8b07-687112f0043d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.398600] env[62619]: DEBUG nova.compute.provider_tree [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 625.432022] env[62619]: INFO nova.compute.manager [-] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Took 1.02 seconds to deallocate network for instance. [ 625.436889] env[62619]: DEBUG nova.compute.claims [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 625.436982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 625.440548] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "820e2ce3-a11a-4c69-947b-9de6dffff68e" "released" by "nova.compute.manager.ComputeManager._validate_instance_group_policy.._do_validation" :: held 0.505s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.440650] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 625.631553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-096178a5-ebab-44f8-845b-079141581102 tempest-FloatingIPsAssociationTestJSON-2140901279 tempest-FloatingIPsAssociationTestJSON-2140901279-project-member] Lock "d22ac031-3202-457c-8bb7-d557ad0fe9f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.577s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 625.654667] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364839, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.011773} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.654912] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 625.655144] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 625.655400] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f9c8b552-7b59-4fe8-885c-e3794869c7b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.662683] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 625.662683] env[62619]: value = "task-1364841" [ 625.662683] env[62619]: _type = "Task" [ 625.662683] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.672681] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364841, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.740139] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 625.879501] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364840, 'name': ReconfigVM_Task, 'duration_secs': 0.992538} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.881338] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Reconfigured VM instance instance-00000011 to attach disk [datastore2] b7c425a1-a80d-4a62-a71f-d14fdf638cf7/b7c425a1-a80d-4a62-a71f-d14fdf638cf7.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 625.881964] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d6bb8664-4218-4542-88ad-16be52b9954a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.890120] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 625.890120] env[62619]: value = "task-1364842" [ 625.890120] env[62619]: _type = "Task" [ 625.890120] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.899588] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364842, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.901528] env[62619]: DEBUG nova.scheduler.client.report [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 625.938855] env[62619]: DEBUG nova.compute.manager [None req-fd28123a-605c-4eb7-83c0-e1ddde9c2bfd tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 625.939801] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4220bd4-be19-44ff-a94d-62c9c52a2c2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.948042] env[62619]: DEBUG nova.compute.utils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 625.950900] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 
972e1187-09ee-4703-a3bc-7eb213a5c52e] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 625.951095] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 626.042742] env[62619]: DEBUG nova.policy [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '997faa5915d448d4ba2c45f835ffc44c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ee1a8291e9a8471888c38e3a50371732', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 626.110862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "d7404720-7cf0-41bf-a882-2cb6db2253bc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.111513] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "d7404720-7cf0-41bf-a882-2cb6db2253bc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.111755] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "d7404720-7cf0-41bf-a882-2cb6db2253bc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.111993] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "d7404720-7cf0-41bf-a882-2cb6db2253bc-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.112128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "d7404720-7cf0-41bf-a882-2cb6db2253bc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.115843] 
env[62619]: INFO nova.compute.manager [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Terminating instance [ 626.117570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "refresh_cache-d7404720-7cf0-41bf-a882-2cb6db2253bc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.117750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquired lock "refresh_cache-d7404720-7cf0-41bf-a882-2cb6db2253bc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.117930] env[62619]: DEBUG nova.network.neutron [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.135275] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 626.184227] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364841, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.168665} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.184227] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 626.184227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d3ea5c3-a090-483f-9d91-59eb701e6a64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.206727] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Reconfiguring VM instance instance-0000000e to attach disk [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 626.207516] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-339329fd-8ffd-4ed8-b048-bda1277c724d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.228068] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 626.228068] env[62619]: value = "task-1364843" [ 626.228068] env[62619]: _type = "Task" [ 626.228068] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.239643] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364843, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.269248] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.397199] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Successfully created port: ea6ce5f8-690a-4e8e-8143-d1b062fe542b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 626.404359] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364842, 'name': Rename_Task, 'duration_secs': 0.389577} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.404967] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 626.404967] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a75f09c-efc7-49ee-932e-35b615954c49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.407200] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.976s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 626.407866] env[62619]: ERROR nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Traceback (most recent call last): [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self.driver.spawn(context, instance, image_meta, [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] vm_ref = self.build_virtual_machine(instance, [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] vif_infos = vmwarevif.get_vif_info(self._session, [ 626.407866] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] for vif in network_info: [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 626.408166] env[62619]: 
ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return self._sync_wrapper(fn, *args, **kwargs) [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self.wait() [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self[:] = self._gt.wait() [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return self._exit_event.wait() [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] result = hub.switch() [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 626.408166] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return self.greenlet.switch() [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] result = function(*args, **kwargs) [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] return func(*args, **kwargs) [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] raise e [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] nwinfo = self.network_api.allocate_for_instance( [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] created_port_ids = self._update_ports_for_instance( [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 626.408530] env[62619]: ERROR nova.compute.manager 
[instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] with excutils.save_and_reraise_exception(): [ 626.408530] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] self.force_reraise() [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] raise self.value [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] updated_port = self._update_port( [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] _ensure_no_port_binding_failure(port) [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] raise exception.PortBindingFailed(port_id=port['id']) [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] nova.exception.PortBindingFailed: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. [ 626.408837] env[62619]: ERROR nova.compute.manager [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] [ 626.410513] env[62619]: DEBUG nova.compute.utils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. 
{{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 626.411704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.065s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 626.413299] env[62619]: INFO nova.compute.claims [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 626.416380] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Build of instance 5680b562-8e8d-4fed-8b48-ec7add23ed7b was re-scheduled: Binding failed for port c495a2a5-cb70-40fe-b790-8849bc1d1fe5, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 626.416845] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 626.417083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 626.417505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 626.417505] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 626.421937] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 626.421937] env[62619]: value = "task-1364844" [ 626.421937] env[62619]: _type = "Task" [ 626.421937] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.436263] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364844, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.456615] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 626.458463] env[62619]: INFO nova.compute.manager [None req-fd28123a-605c-4eb7-83c0-e1ddde9c2bfd tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] instance snapshotting [ 626.459690] env[62619]: DEBUG nova.objects.instance [None req-fd28123a-605c-4eb7-83c0-e1ddde9c2bfd tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lazy-loading 'flavor' on Instance uuid d7404720-7cf0-41bf-a882-2cb6db2253bc {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 626.645517] env[62619]: DEBUG nova.network.neutron [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.669097] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 626.742898] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364843, 'name': ReconfigVM_Task, 'duration_secs': 0.348795} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 626.743784] env[62619]: DEBUG nova.network.neutron [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 626.746138] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Reconfigured VM instance instance-0000000e to attach disk [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330/79a94ed1-1139-4194-8091-00b7b1562330.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 626.750038] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b60962ab-6829-4dd9-b77b-677105498c9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.758344] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 626.758344] env[62619]: value = "task-1364845" [ 626.758344] env[62619]: _type = "Task" [ 626.758344] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 626.776259] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364845, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.941370] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364844, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 626.957941] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 626.971129] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4937d87a-ef72-4450-9620-1efa2dc08895 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 626.993398] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d04f26-30c8-42de-aea8-e0f5601ce26b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.117748] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 627.250208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Releasing lock "refresh_cache-d7404720-7cf0-41bf-a882-2cb6db2253bc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.250702] env[62619]: DEBUG nova.compute.manager [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 627.250901] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 627.251870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-662f129e-b542-445f-9284-b489fb0e5a93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.264715] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 627.268602] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6bfa4866-2a36-4b9a-8afa-39e1c21bcfd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.279290] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364845, 'name': Rename_Task, 'duration_secs': 0.251807} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.280916] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 627.281346] env[62619]: DEBUG oslo_vmware.api [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 627.281346] env[62619]: value = "task-1364846" [ 627.281346] env[62619]: _type = "Task" [ 627.281346] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.281593] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b71dbae1-acd8-4583-8d8d-046abbf42464 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.935821] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 627.937880] env[62619]: DEBUG nova.compute.manager [None req-fd28123a-605c-4eb7-83c0-e1ddde9c2bfd tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Instance disappeared during snapshot {{(pid=62619) _snapshot_instance /opt/stack/nova/nova/compute/manager.py:4494}} [ 627.939741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-5680b562-8e8d-4fed-8b48-ec7add23ed7b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 627.940077] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 627.940158] env[62619]: DEBUG nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 627.940337] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 627.942984] env[62619]: WARNING oslo_vmware.common.loopingcall [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] task run outlasted interval by 0.15969500000000003 sec [ 627.949939] env[62619]: DEBUG nova.compute.manager [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Received event network-changed-ea6ce5f8-690a-4e8e-8143-d1b062fe542b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 627.950229] env[62619]: DEBUG nova.compute.manager [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Refreshing instance network info cache due to event network-changed-ea6ce5f8-690a-4e8e-8143-d1b062fe542b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 627.950359] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] Acquiring lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 627.950515] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] Acquired lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 627.950643] env[62619]: DEBUG nova.network.neutron [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Refreshing network info cache for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 627.952502] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Waiting for the task: (returnval){ [ 627.952502] env[62619]: value = "task-1364847" [ 627.952502] env[62619]: _type = "Task" [ 627.952502] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 627.964909] env[62619]: DEBUG oslo_vmware.api [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364844, 'name': PowerOnVM_Task, 'duration_secs': 1.066437} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.965082] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 627.965329] env[62619]: DEBUG nova.compute.manager [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 627.970620] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42731166-5a1e-4ee2-86f1-d678e62f1af9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.973476] env[62619]: DEBUG oslo_vmware.api [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364846, 'name': PowerOffVM_Task, 'duration_secs': 0.208788} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 627.976839] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 627.977064] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 627.979287] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c4fdf8a2-0789-4117-887c-b3813b540c21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 627.980452] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.986609] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364847, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.005052] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 628.005645] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 628.005970] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 628.006350] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 628.006617] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 628.006913] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 628.007191] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 628.007448] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 628.008291] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f 
tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 628.008291] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 628.008409] env[62619]: DEBUG nova.virt.hardware [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 628.011983] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0443f95-0db1-47f1-b987-3fcf3a59314b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.025913] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 628.026457] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 628.026882] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Deleting the datastore file [datastore2] d7404720-7cf0-41bf-a882-2cb6db2253bc {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 628.029370] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-eb3a996b-750c-4f9e-b07e-f92818ae1596 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.035901] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac4879e6-37be-4e58-bcbf-afee760a2ac9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.052778] env[62619]: DEBUG oslo_vmware.api [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for the task: (returnval){ [ 628.052778] env[62619]: value = "task-1364849" [ 628.052778] env[62619]: _type = "Task" [ 628.052778] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.064325] env[62619]: DEBUG oslo_vmware.api [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364849, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.135038] env[62619]: DEBUG nova.compute.manager [None req-fd28123a-605c-4eb7-83c0-e1ddde9c2bfd tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Found 0 images (rotation: 2) {{(pid=62619) _rotate_backups /opt/stack/nova/nova/compute/manager.py:4554}} [ 628.344054] env[62619]: ERROR nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. [ 628.344054] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.344054] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.344054] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.344054] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.344054] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.344054] env[62619]: ERROR nova.compute.manager raise self.value [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.344054] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 628.344054] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.344054] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 628.344673] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.344673] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 628.344673] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. 
[ 628.344673] env[62619]: ERROR nova.compute.manager [ 628.344673] env[62619]: Traceback (most recent call last): [ 628.344673] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 628.344673] env[62619]: listener.cb(fileno) [ 628.344673] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.344673] env[62619]: result = function(*args, **kwargs) [ 628.344673] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.344673] env[62619]: return func(*args, **kwargs) [ 628.344673] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.344673] env[62619]: raise e [ 628.344673] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.344673] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 628.344673] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.344673] env[62619]: created_port_ids = self._update_ports_for_instance( [ 628.344673] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.344673] env[62619]: with excutils.save_and_reraise_exception(): [ 628.344673] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.344673] env[62619]: self.force_reraise() [ 628.344673] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.344673] env[62619]: raise self.value [ 628.344673] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.344673] env[62619]: updated_port = self._update_port( [ 628.344673] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.344673] env[62619]: _ensure_no_port_binding_failure(port) [ 628.344673] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.344673] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 628.345452] env[62619]: nova.exception.PortBindingFailed: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. [ 628.345452] env[62619]: Removing descriptor: 17 [ 628.345452] env[62619]: ERROR nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. 
[ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Traceback (most recent call last): [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] yield resources [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self.driver.spawn(context, instance, image_meta, [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 628.345452] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] vm_ref = self.build_virtual_machine(instance, [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] vif_infos = vmwarevif.get_vif_info(self._session, [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] for vif in network_info: [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return self._sync_wrapper(fn, *args, **kwargs) [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self.wait() [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self[:] = self._gt.wait() [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return self._exit_event.wait() [ 628.345795] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 628.346161] env[62619]: ERROR 
nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] result = hub.switch() [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return self.greenlet.switch() [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] result = function(*args, **kwargs) [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return func(*args, **kwargs) [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] raise e [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] nwinfo = self.network_api.allocate_for_instance( [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 628.346161] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] created_port_ids = self._update_ports_for_instance( [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] with excutils.save_and_reraise_exception(): [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self.force_reraise() [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] raise self.value [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] updated_port = self._update_port( [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 628.346512] 
env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] _ensure_no_port_binding_failure(port) [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 628.346512] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] raise exception.PortBindingFailed(port_id=port['id']) [ 628.346825] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] nova.exception.PortBindingFailed: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. [ 628.346825] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] [ 628.346825] env[62619]: INFO nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Terminating instance [ 628.349885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquiring lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 628.408132] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3df999-ff55-4096-8ab3-26eec84f6257 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.416831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6e4f4b8-80d6-4905-965f-bd67f0804e96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.450071] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4054ad-2b60-45c5-af9c-02a5ffcc8ba4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.464046] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-961589ea-7c22-4aa3-8072-17a2fd2e4588 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.470368] env[62619]: DEBUG oslo_vmware.api [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Task: {'id': task-1364847, 'name': PowerOnVM_Task, 'duration_secs': 0.900798} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.471152] env[62619]: DEBUG nova.network.neutron [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.480710] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 628.480921] env[62619]: DEBUG nova.compute.manager [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 628.481428] env[62619]: DEBUG nova.compute.provider_tree [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 628.484996] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b72d815-3f6a-4567-a6a2-61f9d6c176af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.490449] env[62619]: DEBUG nova.network.neutron [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.505886] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 628.563156] env[62619]: DEBUG oslo_vmware.api [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Task: {'id': task-1364849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.211323} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 628.565362] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 628.565603] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 628.565729] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 628.565893] env[62619]: INFO nova.compute.manager [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Took 1.32 seconds to destroy the instance on the hypervisor. [ 628.566138] env[62619]: DEBUG oslo.service.loopingcall [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.566329] env[62619]: DEBUG nova.compute.manager [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 628.566422] env[62619]: DEBUG nova.network.neutron [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 628.584444] env[62619]: DEBUG nova.network.neutron [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.605026] env[62619]: DEBUG nova.network.neutron [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.988286] env[62619]: DEBUG nova.scheduler.client.report [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 628.995192] env[62619]: INFO nova.compute.manager [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 5680b562-8e8d-4fed-8b48-ec7add23ed7b] Took 1.05 seconds to deallocate network for instance. [ 629.004375] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.090018] env[62619]: DEBUG nova.network.neutron [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.107899] env[62619]: DEBUG oslo_concurrency.lockutils [req-fe0782bc-56f8-4f8f-a671-9402e9d9ee7b req-caa53330-2960-49fc-ac4a-5e88ff2e1fd5 service nova] Releasing lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 629.108336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquired lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.108518] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.116078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.116316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.116520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.116696] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.116859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.118783] env[62619]: INFO nova.compute.manager [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Terminating instance [ 629.120523] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "refresh_cache-b7c425a1-a80d-4a62-a71f-d14fdf638cf7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.120674] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquired lock "refresh_cache-b7c425a1-a80d-4a62-a71f-d14fdf638cf7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.120832] env[62619]: DEBUG nova.network.neutron [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.284395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "79a94ed1-1139-4194-8091-00b7b1562330" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.285010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "79a94ed1-1139-4194-8091-00b7b1562330" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.285237] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "79a94ed1-1139-4194-8091-00b7b1562330-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 629.285443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "79a94ed1-1139-4194-8091-00b7b1562330-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.285628] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "79a94ed1-1139-4194-8091-00b7b1562330-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.287833] env[62619]: INFO nova.compute.manager [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Terminating instance [ 629.289680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "refresh_cache-79a94ed1-1139-4194-8091-00b7b1562330" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 629.289851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Acquired lock "refresh_cache-79a94ed1-1139-4194-8091-00b7b1562330" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 629.290044] env[62619]: DEBUG nova.network.neutron [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 629.493434] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 
tempest-ImagesOneServerTestJSON-480946824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.082s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 629.493971] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 629.496981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.652s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 629.591488] env[62619]: INFO nova.compute.manager [-] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Took 1.02 seconds to deallocate network for instance. [ 629.627310] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.638312] env[62619]: DEBUG nova.network.neutron [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.683837] env[62619]: DEBUG nova.network.neutron [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.739854] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.806297] env[62619]: DEBUG nova.network.neutron [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 629.814080] env[62619]: DEBUG nova.compute.manager [req-1d0af6ae-0915-4e12-94ff-4364ea793c33 req-ad00557f-9b22-4bfd-a403-62a19b4c6a90 service nova] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Received event network-vif-deleted-ea6ce5f8-690a-4e8e-8143-d1b062fe542b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 629.875020] env[62619]: DEBUG nova.network.neutron [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.001471] env[62619]: DEBUG nova.compute.utils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 630.006403] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 630.006574] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 630.049307] env[62619]: INFO nova.scheduler.client.report [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocations for instance 5680b562-8e8d-4fed-8b48-ec7add23ed7b [ 630.056349] env[62619]: DEBUG nova.policy [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '460066d6536646b692ffb99256f3b5be', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'da967c3344254c2484ced9a9afb8ede6', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 630.098733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 630.186864] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Releasing lock 
"refresh_cache-b7c425a1-a80d-4a62-a71f-d14fdf638cf7" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.187349] env[62619]: DEBUG nova.compute.manager [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 630.187567] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.188634] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-690ca779-be34-44fd-9548-43a34ab2ec49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.197628] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 630.197925] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8752cb5b-2214-43a8-bdd9-a4006b46d5e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.205759] env[62619]: DEBUG oslo_vmware.api [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 630.205759] env[62619]: value = "task-1364850" [ 630.205759] env[62619]: _type = "Task" [ 630.205759] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.217517] env[62619]: DEBUG oslo_vmware.api [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364850, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.243271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Releasing lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.243271] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 630.243271] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.243271] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-105597d0-afbd-4f96-b9d9-c930e0713721 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.252666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd9844db-f5e1-4bc4-b111-1156821284cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.286874] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 972e1187-09ee-4703-a3bc-7eb213a5c52e could not be found. [ 630.287127] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 630.287310] env[62619]: INFO nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 630.287562] env[62619]: DEBUG oslo.service.loopingcall [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.287830] env[62619]: DEBUG nova.compute.manager [-] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 630.287926] env[62619]: DEBUG nova.network.neutron [-] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 630.306454] env[62619]: DEBUG nova.network.neutron [-] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 630.377255] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Releasing lock "refresh_cache-79a94ed1-1139-4194-8091-00b7b1562330" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 630.377730] env[62619]: DEBUG nova.compute.manager [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 630.377954] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 630.378831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08144a3a-d0f6-4e60-9780-40a6264ff911 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.389441] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 630.389703] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8e2a9085-323a-4106-aa73-556b6beb41b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.398303] env[62619]: DEBUG oslo_vmware.api [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 630.398303] env[62619]: value = "task-1364851" [ 630.398303] env[62619]: _type = "Task" [ 630.398303] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.411259] env[62619]: DEBUG oslo_vmware.api [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364851, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.441054] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14e59cdd-68d0-45ed-8675-9948702164f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.451806] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4487a462-ec3d-434a-b242-2687a33d8dba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.484685] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Successfully created port: 758127a2-1efd-462c-94e0-cf1ca05545ae {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 630.487107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf250f99-adeb-4477-a3c8-6486061e3782 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.496385] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebb7d9fa-c1f9-437d-8c62-8d9911714dae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.512487] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 630.521034] env[62619]: DEBUG nova.compute.provider_tree [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.561202] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e24a86ef-9e07-4b82-b063-1f4956e3babc tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "5680b562-8e8d-4fed-8b48-ec7add23ed7b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.368s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 630.718820] env[62619]: DEBUG oslo_vmware.api [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364850, 'name': PowerOffVM_Task, 'duration_secs': 0.147585} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.719195] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 630.719600] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 630.719924] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7ee5debd-d89a-4432-9d8e-9c759c0ebf4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.749583] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 630.749923] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 630.750141] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleting the datastore file [datastore2] b7c425a1-a80d-4a62-a71f-d14fdf638cf7 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.750412] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1ab5f202-74a1-4efb-b964-e64e21a5235f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.758204] env[62619]: DEBUG oslo_vmware.api [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for the task: (returnval){ [ 630.758204] env[62619]: value = "task-1364853" [ 630.758204] env[62619]: _type = "Task" [ 630.758204] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.770068] env[62619]: DEBUG oslo_vmware.api [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364853, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.810378] env[62619]: DEBUG nova.network.neutron [-] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 630.913121] env[62619]: DEBUG oslo_vmware.api [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364851, 'name': PowerOffVM_Task, 'duration_secs': 0.146856} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.913463] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 630.913636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 630.913898] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c24416d7-ef98-45fb-b471-e6e085fd8cdc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.946899] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 630.947256] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 630.947581] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleting the datastore file [datastore2] 79a94ed1-1139-4194-8091-00b7b1562330 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 630.947997] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3f9436fa-c3ef-4592-8dac-f8acc2d189ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.958702] env[62619]: DEBUG oslo_vmware.api [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for the task: (returnval){ [ 630.958702] env[62619]: value = "task-1364855" [ 630.958702] env[62619]: _type = "Task" [ 630.958702] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.969874] env[62619]: DEBUG oslo_vmware.api [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364855, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.025262] env[62619]: DEBUG nova.scheduler.client.report [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 631.062770] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 631.274238] env[62619]: DEBUG oslo_vmware.api [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Task: {'id': task-1364853, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.453028} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.274842] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 631.275371] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 631.276177] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.276650] env[62619]: INFO nova.compute.manager [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Took 1.09 seconds to destroy the instance on the hypervisor. 
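The destroy path above follows one pattern for every vCenter call: submit a task (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task), then poll it until it reports success or error, which is what the wait_for_task / _poll_task entries with "progress is 0%" and "completed successfully ... duration_secs" record. A minimal sketch of that polling loop is below; it is an illustration of the pattern only, not the oslo.vmware implementation, and the session helper, poll interval and TaskFailed type are assumptions.

```python
import time


class TaskFailed(Exception):
    """Raised when vCenter reports the task state as 'error' (illustrative only)."""


def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a vCenter task until it finishes, mirroring the wait_for_task /
    _poll_task entries in the log above (simplified sketch).

    `session` is assumed to expose get_task_info(task_ref) returning an object
    with `state` ('queued', 'running', 'success', 'error'), `progress` and
    `error` attributes -- a stand-in for the real oslo.vmware session, not a
    copy of its API.
    """
    while True:
        info = session.get_task_info(task_ref)
        if info.state == 'success':
            return info                       # duration ends up in the log line
        if info.state == 'error':
            raise TaskFailed(info.error)      # surfaced as an ERROR log entry
        # 'queued' / 'running': report progress and poll again
        print(f"Task {task_ref}: {info.state}, progress {info.progress}%")
        time.sleep(poll_interval)
```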
[ 631.277268] env[62619]: DEBUG oslo.service.loopingcall [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.277589] env[62619]: DEBUG nova.compute.manager [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 631.277756] env[62619]: DEBUG nova.network.neutron [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 631.295829] env[62619]: DEBUG nova.network.neutron [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.316447] env[62619]: INFO nova.compute.manager [-] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Took 1.03 seconds to deallocate network for instance. [ 631.317519] env[62619]: DEBUG nova.compute.claims [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 631.318538] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.473196] env[62619]: DEBUG oslo_vmware.api [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Task: {'id': task-1364855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256158} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 631.473196] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 631.473366] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 631.473541] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 631.473712] env[62619]: INFO nova.compute.manager [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Took 1.10 seconds to destroy the instance on the hypervisor. [ 631.473964] env[62619]: DEBUG oslo.service.loopingcall [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 631.474174] env[62619]: DEBUG nova.compute.manager [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 631.474271] env[62619]: DEBUG nova.network.neutron [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 631.502633] env[62619]: DEBUG nova.network.neutron [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 631.530269] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 631.532699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.036s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 631.533844] env[62619]: ERROR nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Traceback (most recent call last): [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self.driver.spawn(context, instance, image_meta, [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] vm_ref = self.build_virtual_machine(instance, [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] vif_infos = vmwarevif.get_vif_info(self._session, [ 631.533844] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] for vif in network_info: [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return self._sync_wrapper(fn, *args, **kwargs) [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self.wait() [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 631.534400] env[62619]: ERROR 
nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self[:] = self._gt.wait() [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return self._exit_event.wait() [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] result = hub.switch() [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 631.534400] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return self.greenlet.switch() [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] result = function(*args, **kwargs) [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] return func(*args, **kwargs) [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] raise e [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] nwinfo = self.network_api.allocate_for_instance( [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] created_port_ids = self._update_ports_for_instance( [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] with excutils.save_and_reraise_exception(): [ 631.534898] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] self.force_reraise() [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] raise self.value [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] updated_port = self._update_port( [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] _ensure_no_port_binding_failure(port) [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] raise exception.PortBindingFailed(port_id=port['id']) [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] nova.exception.PortBindingFailed: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. [ 631.535401] env[62619]: ERROR nova.compute.manager [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] [ 631.536114] env[62619]: DEBUG nova.compute.utils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 631.536360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.417s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.538136] env[62619]: INFO nova.compute.claims [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 631.541267] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Build of instance 7f17ce32-e674-4275-a313-a3f69dde2ee9 was re-scheduled: Binding failed for port 539a2943-0cc1-46ab-b932-112b4866f823, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 631.541959] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 631.541959] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquiring lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.541959] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Acquired lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.542117] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 631.568141] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 631.568406] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 631.568562] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 631.568763] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Flavor pref 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 631.568921] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 631.569075] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 631.569278] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 631.569428] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 631.569587] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 631.569740] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 631.569907] env[62619]: DEBUG nova.virt.hardware [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 631.573169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6159dda7-e1ba-46fa-80fc-d3a52ae67bae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.582298] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36a8745b-344c-444b-bef6-7a2fe8819075 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 631.588491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.798406] env[62619]: DEBUG nova.network.neutron [-] [instance: 
b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.808703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "c307cc2f-d0c9-49ab-aafa-768a34199f0c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 631.809055] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "c307cc2f-d0c9-49ab-aafa-768a34199f0c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 631.843656] env[62619]: DEBUG nova.compute.manager [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Received event network-changed-758127a2-1efd-462c-94e0-cf1ca05545ae {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 631.843851] env[62619]: DEBUG nova.compute.manager [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Refreshing instance network info cache due to event network-changed-758127a2-1efd-462c-94e0-cf1ca05545ae. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 631.844303] env[62619]: DEBUG oslo_concurrency.lockutils [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] Acquiring lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 631.844464] env[62619]: DEBUG oslo_concurrency.lockutils [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] Acquired lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 631.844626] env[62619]: DEBUG nova.network.neutron [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Refreshing network info cache for port 758127a2-1efd-462c-94e0-cf1ca05545ae {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 632.006824] env[62619]: DEBUG nova.network.neutron [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.064763] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.079628] env[62619]: ERROR nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. [ 632.079628] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.079628] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.079628] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.079628] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.079628] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.079628] env[62619]: ERROR nova.compute.manager raise self.value [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.079628] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 632.079628] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.079628] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 632.080123] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.080123] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 632.080123] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. 
[ 632.080123] env[62619]: ERROR nova.compute.manager [ 632.080123] env[62619]: Traceback (most recent call last): [ 632.080123] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 632.080123] env[62619]: listener.cb(fileno) [ 632.080123] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.080123] env[62619]: result = function(*args, **kwargs) [ 632.080123] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.080123] env[62619]: return func(*args, **kwargs) [ 632.080123] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.080123] env[62619]: raise e [ 632.080123] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.080123] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 632.080123] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.080123] env[62619]: created_port_ids = self._update_ports_for_instance( [ 632.080123] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.080123] env[62619]: with excutils.save_and_reraise_exception(): [ 632.080123] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.080123] env[62619]: self.force_reraise() [ 632.080123] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.080123] env[62619]: raise self.value [ 632.080123] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.080123] env[62619]: updated_port = self._update_port( [ 632.080123] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.080123] env[62619]: _ensure_no_port_binding_failure(port) [ 632.080123] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.080123] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 632.081115] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. [ 632.081115] env[62619]: Removing descriptor: 17 [ 632.081115] env[62619]: ERROR nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. 
[ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Traceback (most recent call last): [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] yield resources [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self.driver.spawn(context, instance, image_meta, [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 632.081115] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] vm_ref = self.build_virtual_machine(instance, [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] vif_infos = vmwarevif.get_vif_info(self._session, [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] for vif in network_info: [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return self._sync_wrapper(fn, *args, **kwargs) [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self.wait() [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self[:] = self._gt.wait() [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return self._exit_event.wait() [ 632.081447] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 632.081791] env[62619]: ERROR 
nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] result = hub.switch() [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return self.greenlet.switch() [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] result = function(*args, **kwargs) [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return func(*args, **kwargs) [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] raise e [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] nwinfo = self.network_api.allocate_for_instance( [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 632.081791] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] created_port_ids = self._update_ports_for_instance( [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] with excutils.save_and_reraise_exception(): [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self.force_reraise() [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] raise self.value [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] updated_port = self._update_port( [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 632.082152] 
env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] _ensure_no_port_binding_failure(port) [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 632.082152] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] raise exception.PortBindingFailed(port_id=port['id']) [ 632.082485] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. [ 632.082485] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] [ 632.082485] env[62619]: INFO nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Terminating instance [ 632.083017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquiring lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 632.113258] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.301518] env[62619]: INFO nova.compute.manager [-] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Took 1.02 seconds to deallocate network for instance. [ 632.362338] env[62619]: DEBUG nova.network.neutron [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.433810] env[62619]: DEBUG nova.network.neutron [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 632.508112] env[62619]: INFO nova.compute.manager [-] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Took 1.03 seconds to deallocate network for instance. 
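Both build failures in this run are the same one: after Nova asks Neutron to update/bind a port, it inspects the returned port and aborts the build if the binding did not succeed, which is the raise exception.PortBindingFailed(port_id=port['id']) frame at nova/network/neutron.py line 294 in both tracebacks (ports 539a2943 and 758127a2). A condensed sketch of that check is below; the 'binding:vif_type' / 'binding_failed' comparison is the conventional way Neutron signals a failed binding and is written here as an assumption rather than quoted from the Nova source.

```python
# Condensed sketch of the check behind the PortBindingFailed frames above.
# Assumes Neutron marks a failed binding by setting the port's
# 'binding:vif_type' attribute to 'binding_failed'.

VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port):
    """Raise if Neutron reports the port's binding as failed."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port dict shaped like Neutron's response for a failed binding.
failed_port = {'id': '758127a2-1efd-462c-94e0-cf1ca05545ae',
               'binding:vif_type': VIF_TYPE_BINDING_FAILED}
# ensure_no_port_binding_failure(failed_port)  # -> PortBindingFailed, as logged above
```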
[ 632.615845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Releasing lock "refresh_cache-7f17ce32-e674-4275-a313-a3f69dde2ee9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.616146] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 632.616351] env[62619]: DEBUG nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 632.617029] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 632.634275] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 632.807526] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 632.893699] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a09cfb6-48b5-4e6d-917c-e07135a4a381 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.901888] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f2ff32-5c0a-4ce3-b98f-1f9163aaa1e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.930818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-99efb45b-b269-46c2-81f1-09b17e32515b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.937890] env[62619]: DEBUG oslo_concurrency.lockutils [req-e8458eda-e053-40dc-b77a-abfb186274a1 req-a3dfec85-223e-4e1c-968d-ad24f13cf973 service nova] Releasing lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 632.938342] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquired lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 632.938522] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 632.940538] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e5e4146-2df9-4274-b182-65ec1ecd547d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 632.954799] env[62619]: DEBUG nova.compute.provider_tree [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 633.014458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 
tempest-ServersAdmin275Test-1655766995-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 633.136940] env[62619]: DEBUG nova.network.neutron [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.460960] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 633.473928] env[62619]: ERROR nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [req-308902dd-c10c-424e-b079-9dd00fe0a456] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-308902dd-c10c-424e-b079-9dd00fe0a456"}]} [ 633.491620] env[62619]: DEBUG nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 633.505892] env[62619]: DEBUG nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 633.506128] env[62619]: DEBUG nova.compute.provider_tree [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 633.519126] env[62619]: DEBUG nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 633.535755] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 633.537710] env[62619]: DEBUG nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 633.641553] env[62619]: INFO nova.compute.manager [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] [instance: 7f17ce32-e674-4275-a313-a3f69dde2ee9] Took 1.02 seconds to deallocate network for instance. 
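[editor's note] The records above (the 409 with code "placement.concurrent_update" at 633.473928, the inventory/aggregate/trait refresh that follows, and the later generation bump from 49 to 50 at 634.457794) illustrate Placement's generation-based optimistic concurrency: an inventory PUT carrying a stale resource-provider generation is rejected, the client re-reads the provider, and retries. The sketch below is an illustrative, hedged reconstruction of that pattern against the Placement HTTP API only; it is not Nova's report-client code, and the endpoint URL, microversion, and function name are assumptions made for the example.

```python
# Minimal sketch (assumed names): retry an inventory PUT when Placement
# returns 409 placement.concurrent_update because the provider generation
# we sent is stale, as seen in the log records above.
import requests

PLACEMENT = "http://placement.example.test"            # hypothetical endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.28"}  # microversion is an assumption


def set_inventory_with_retry(session, rp_uuid, inventories, max_attempts=3):
    """PUT inventory, refreshing the provider generation on 409 conflicts."""
    for _ in range(max_attempts):
        # Re-read the provider to get its current generation (the
        # optimistic-concurrency token Placement checks on writes).
        rp = session.get(f"{PLACEMENT}/resource_providers/{rp_uuid}",
                         headers=HEADERS).json()
        body = {"resource_provider_generation": rp["generation"],
                "inventories": inventories}
        resp = session.put(f"{PLACEMENT}/resource_providers/{rp_uuid}/inventories",
                           json=body, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()
        # 409 with code placement.concurrent_update: another writer bumped
        # the generation; loop and retry with the freshly read value.
    raise RuntimeError(f"gave up updating inventory for {rp_uuid} "
                       f"after {max_attempts} attempts")


# Example call shape (values mirror the DISK_GB/VCPU/MEMORY_MB dicts logged above):
# set_inventory_with_retry(requests.Session(),
#                          "c1b543f3-8b72-4e01-a5a8-30dc9ed76c83",
#                          {"VCPU": {"total": 48, "allocation_ratio": 4.0}})
```
[end editor's note]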
[ 633.865023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eda337ed-a5e4-4e63-aad7-f7e12793befe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.873013] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7697be2a-8bd9-4344-8f25-516839e5bf04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.903093] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59fafd9d-57da-4d60-833d-800b978a7e8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.911151] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31882ecb-1c02-401f-9179-390a9ac24865 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 633.924462] env[62619]: DEBUG nova.compute.provider_tree [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 633.955908] env[62619]: DEBUG nova.compute.manager [req-3efe702f-307d-4975-9a31-b1f8a3e3f819 req-ad907639-550e-4b99-abd5-e211d5e86bdd service nova] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Received event network-vif-deleted-758127a2-1efd-462c-94e0-cf1ca05545ae {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 634.041077] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Releasing lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 634.041204] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 634.041400] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 634.041694] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dd427a9a-a9d3-42b2-81c6-f98865862431 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.051065] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f84683d-a710-4fe2-af88-2b8dd1a1ce13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 634.073680] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc could not be found. [ 634.073888] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 634.074111] env[62619]: INFO nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Took 0.03 seconds to destroy the instance on the hypervisor. [ 634.074357] env[62619]: DEBUG oslo.service.loopingcall [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 634.074564] env[62619]: DEBUG nova.compute.manager [-] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 634.074656] env[62619]: DEBUG nova.network.neutron [-] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 634.089065] env[62619]: DEBUG nova.network.neutron [-] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 634.457499] env[62619]: DEBUG nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 49 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 634.457794] env[62619]: DEBUG nova.compute.provider_tree [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 49 to 50 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 634.457984] env[62619]: DEBUG nova.compute.provider_tree [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 634.591584] env[62619]: DEBUG nova.network.neutron [-] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 634.670075] env[62619]: INFO nova.scheduler.client.report [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Deleted allocations for instance 7f17ce32-e674-4275-a313-a3f69dde2ee9 [ 634.962873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.426s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 634.963461] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 634.966158] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.176s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 634.967567] env[62619]: INFO nova.compute.claims [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 635.094570] env[62619]: INFO nova.compute.manager [-] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Took 1.02 seconds to deallocate network for instance. [ 635.096463] env[62619]: DEBUG nova.compute.claims [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 635.096638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 635.175914] env[62619]: DEBUG oslo_concurrency.lockutils [None req-55dcddcd-f42c-48d5-99b3-6b2d7d933ece tempest-DeleteServersAdminTestJSON-304460697 tempest-DeleteServersAdminTestJSON-304460697-project-member] Lock "7f17ce32-e674-4275-a313-a3f69dde2ee9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 66.985s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 635.472425] env[62619]: DEBUG nova.compute.utils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 635.475922] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 635.476093] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 635.529869] env[62619]: DEBUG nova.policy [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2de58eb8a07a4cd4a22c4b7dbd53a526', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f56ffd7008c4df68c875a8fe2591b9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 635.678146] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 635.804702] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Successfully created port: f88f8e1d-28c6-463a-a3b1-e82451ce6b58 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 635.976889] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 636.198491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 636.383730] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bef9f497-ad7e-4438-a70c-28f75d61cc1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.391746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d313f574-974a-4414-a018-a0e14567bbae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.422528] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b7faa53-740c-45c3-b786-741cc70bc894 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.430640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc8b4a3f-df8c-4c7c-b49e-d84d6d6cbec1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 636.446288] env[62619]: DEBUG nova.compute.provider_tree [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 636.604560] env[62619]: DEBUG nova.compute.manager [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Received event network-changed-f88f8e1d-28c6-463a-a3b1-e82451ce6b58 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 636.604767] env[62619]: DEBUG nova.compute.manager [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Refreshing instance network info cache due to event network-changed-f88f8e1d-28c6-463a-a3b1-e82451ce6b58. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 636.604988] env[62619]: DEBUG oslo_concurrency.lockutils [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] Acquiring lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 636.605153] env[62619]: DEBUG oslo_concurrency.lockutils [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] Acquired lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 636.605689] env[62619]: DEBUG nova.network.neutron [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Refreshing network info cache for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 636.951022] env[62619]: DEBUG nova.scheduler.client.report [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 636.989200] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 637.016660] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 637.016987] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 637.017153] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 637.017340] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 637.017484] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 637.017626] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 637.017865] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 637.018034] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 637.018202] 
env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 637.018362] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 637.018521] env[62619]: DEBUG nova.virt.hardware [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 637.019414] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738b67b6-baa3-4bc1-a5ab-c198dbabe4c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.029336] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6f6a567-bf39-4d9a-a88b-f776edcfd306 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.032802] env[62619]: ERROR nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. 
[ 637.032802] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.032802] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.032802] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.032802] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.032802] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.032802] env[62619]: ERROR nova.compute.manager raise self.value [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.032802] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 637.032802] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.032802] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 637.033529] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.033529] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 637.033529] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. 
[ 637.033529] env[62619]: ERROR nova.compute.manager [ 637.033529] env[62619]: Traceback (most recent call last): [ 637.033529] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 637.033529] env[62619]: listener.cb(fileno) [ 637.033529] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.033529] env[62619]: result = function(*args, **kwargs) [ 637.033529] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 637.033529] env[62619]: return func(*args, **kwargs) [ 637.033529] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.033529] env[62619]: raise e [ 637.033529] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.033529] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 637.033529] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.033529] env[62619]: created_port_ids = self._update_ports_for_instance( [ 637.033529] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.033529] env[62619]: with excutils.save_and_reraise_exception(): [ 637.033529] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.033529] env[62619]: self.force_reraise() [ 637.033529] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.033529] env[62619]: raise self.value [ 637.033529] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.033529] env[62619]: updated_port = self._update_port( [ 637.033529] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.033529] env[62619]: _ensure_no_port_binding_failure(port) [ 637.033529] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 637.033529] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 637.034666] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. [ 637.034666] env[62619]: Removing descriptor: 18 [ 637.044695] env[62619]: ERROR nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. 
[ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Traceback (most recent call last): [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] yield resources [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self.driver.spawn(context, instance, image_meta, [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self._vmops.spawn(context, instance, image_meta, injected_files, [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] vm_ref = self.build_virtual_machine(instance, [ 637.044695] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] vif_infos = vmwarevif.get_vif_info(self._session, [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] for vif in network_info: [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] return self._sync_wrapper(fn, *args, **kwargs) [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self.wait() [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self[:] = self._gt.wait() [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] return self._exit_event.wait() [ 637.045066] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 637.045066] env[62619]: ERROR 
nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] current.throw(*self._exc) [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] result = function(*args, **kwargs) [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] return func(*args, **kwargs) [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] raise e [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] nwinfo = self.network_api.allocate_for_instance( [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] created_port_ids = self._update_ports_for_instance( [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] with excutils.save_and_reraise_exception(): [ 637.045471] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self.force_reraise() [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] raise self.value [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] updated_port = self._update_port( [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] _ensure_no_port_binding_failure(port) [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] raise exception.PortBindingFailed(port_id=port['id']) [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] nova.exception.PortBindingFailed: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. [ 637.045829] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] [ 637.045829] env[62619]: INFO nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Terminating instance [ 637.046997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 637.129679] env[62619]: DEBUG nova.network.neutron [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 637.270233] env[62619]: DEBUG nova.network.neutron [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.458251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.490s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 637.458251] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 637.462798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.603s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 637.462843] env[62619]: DEBUG nova.objects.instance [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 637.774856] env[62619]: DEBUG oslo_concurrency.lockutils [req-75361a00-6d7b-4b36-af23-daaa550615cb req-f63ec22a-91b3-4bae-9ef9-4461375085bc service nova] Releasing lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 637.775291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquired lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 637.775477] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 637.966021] env[62619]: DEBUG nova.compute.utils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 637.966021] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 637.966021] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 638.050872] env[62619]: DEBUG nova.policy [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66d44144b6864c30b9e593927a12c756', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928b8764106043caadbf11db62d3228e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 638.227035] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.227035] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.300606] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 638.399062] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.471512] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 638.473953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77e6bb37-40fb-48ad-9983-e157fd7e52a5 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.012s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 638.475720] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 18.871s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 638.625100] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Successfully created port: 037f72ab-0bda-4c8e-abf0-a84b1d9d5817 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 638.635504] env[62619]: DEBUG nova.compute.manager [req-6761fb0d-cf95-438d-bfdf-b69f21bf8e41 req-e0f5a4e2-6f17-47ec-9e3d-2a42758f74a3 service nova] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Received event network-vif-deleted-f88f8e1d-28c6-463a-a3b1-e82451ce6b58 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 638.742026] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 638.742026] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 638.742026] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 638.900998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Releasing lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 638.901452] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 638.901641] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 638.901941] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f112257a-3778-4849-98ef-508fae755740 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.911983] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c52447e8-d874-4b79-ad1b-be75df57c673 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.935023] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c4ada5ac-4da7-4ae8-a9d9-577a83dd0359 could not be found. [ 638.935445] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 638.935445] env[62619]: INFO nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Took 0.03 seconds to destroy the instance on the hypervisor. [ 638.935734] env[62619]: DEBUG oslo.service.loopingcall [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 638.935899] env[62619]: DEBUG nova.compute.manager [-] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 638.935991] env[62619]: DEBUG nova.network.neutron [-] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 638.956663] env[62619]: DEBUG nova.network.neutron [-] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 639.246021] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Skipping network cache update for instance because it is Building. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 639.246021] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 639.246379] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 639.246379] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 639.246379] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 639.283521] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "refresh_cache-79a94ed1-1139-4194-8091-00b7b1562330" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.285112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquired lock "refresh_cache-79a94ed1-1139-4194-8091-00b7b1562330" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.285112] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 639.285404] env[62619]: DEBUG nova.objects.instance [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lazy-loading 'info_cache' on Instance uuid 79a94ed1-1139-4194-8091-00b7b1562330 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 639.461306] env[62619]: DEBUG nova.network.neutron [-] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 639.464909] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dbe0865-167b-475f-843a-1916a245e43e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.475079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-902e708d-c83a-402a-a2ee-c5a248bd5737 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.481028] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Start spawning the instance 
on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 639.512303] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb0f4c86-c680-477d-a637-2af7740cf1c4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.524099] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 639.524407] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 639.528045] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 639.528045] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 639.528045] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 639.528045] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 639.528045] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 639.528351] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 
tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 639.528351] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 639.528351] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 639.528351] env[62619]: DEBUG nova.virt.hardware [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 639.528351] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5fd5f45-d7fc-418c-a6a9-479d2f4a6cc9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.533440] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6abe45d1-8cce-4249-b4cd-550dd7c3d7ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.549977] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1585c7fa-9c69-4898-aebc-63fa9961e10d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 639.554864] env[62619]: DEBUG nova.compute.provider_tree [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 639.724971] env[62619]: ERROR nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. 
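The nova.virt.hardware records above walk through CPU topology selection for the m1.nano flavor: with 1 vCPU, no flavor or image limits, and 65536/65536/65536 maximums, only one split survives, sockets=1, cores=1, threads=1. A minimal stand-in for that enumeration (a hypothetical helper for illustration, not Nova's implementation):

    from itertools import product

    def possible_cpu_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Illustrative only: enumerate every (sockets, cores, threads) split whose
        # product equals the vCPU count, capped by the limits seen in the log.
        found = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                found.append((sockets, cores, threads))
        return found

    print(possible_cpu_topologies(1))  # [(1, 1, 1)] -- matches "Got 1 possible topologies"
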
[ 639.724971] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 639.724971] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 639.724971] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 639.724971] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.724971] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.724971] env[62619]: ERROR nova.compute.manager raise self.value [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 639.724971] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 639.724971] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.724971] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 639.725451] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.725451] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 639.725451] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. 
[ 639.725451] env[62619]: ERROR nova.compute.manager [ 639.725451] env[62619]: Traceback (most recent call last): [ 639.725451] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 639.725451] env[62619]: listener.cb(fileno) [ 639.725451] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 639.725451] env[62619]: result = function(*args, **kwargs) [ 639.725451] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 639.725451] env[62619]: return func(*args, **kwargs) [ 639.725451] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 639.725451] env[62619]: raise e [ 639.725451] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 639.725451] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 639.725451] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 639.725451] env[62619]: created_port_ids = self._update_ports_for_instance( [ 639.725451] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 639.725451] env[62619]: with excutils.save_and_reraise_exception(): [ 639.725451] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.725451] env[62619]: self.force_reraise() [ 639.725451] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.725451] env[62619]: raise self.value [ 639.725451] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 639.725451] env[62619]: updated_port = self._update_port( [ 639.725451] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.725451] env[62619]: _ensure_no_port_binding_failure(port) [ 639.725451] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.725451] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 639.726974] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. [ 639.726974] env[62619]: Removing descriptor: 18 [ 639.726974] env[62619]: ERROR nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. 
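The traceback above passes through oslo.utils' save_and_reraise_exception() in _update_ports_for_instance: the body of the with block (cleanup, logging) runs first, then on exit the saved exception is re-raised via force_reraise(), which is why the "raise self.value" frame appears. A minimal sketch of that pattern, assuming oslo.utils is installed; the port helpers here are placeholders, not Nova code:

    from oslo_utils import excutils

    class PortBindingFailed(Exception):
        pass

    def _update_port(port_id):
        # Placeholder for the Neutron port update that fails in the log above.
        raise PortBindingFailed(
            f"Binding failed for port {port_id}, please check neutron logs "
            "for more information.")

    def update_ports_for_instance(port_ids):
        created = []
        for port_id in port_ids:
            try:
                _update_port(port_id)
                created.append(port_id)
            except Exception:
                # Cleanup runs inside the block; on exit the original exception
                # is re-raised unchanged (force_reraise / raise self.value).
                with excutils.save_and_reraise_exception():
                    for done in created:
                        print(f"rolling back port {done}")
        return created

    try:
        update_ports_for_instance(["037f72ab-0bda-4c8e-abf0-a84b1d9d5817"])
    except PortBindingFailed as exc:
        print(f"re-raised: {exc}")
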
[ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Traceback (most recent call last): [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] yield resources [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self.driver.spawn(context, instance, image_meta, [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self._vmops.spawn(context, instance, image_meta, injected_files, [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 639.726974] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] vm_ref = self.build_virtual_machine(instance, [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] vif_infos = vmwarevif.get_vif_info(self._session, [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] for vif in network_info: [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return self._sync_wrapper(fn, *args, **kwargs) [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self.wait() [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self[:] = self._gt.wait() [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return self._exit_event.wait() [ 639.727290] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 639.727947] env[62619]: ERROR 
nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] result = hub.switch() [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return self.greenlet.switch() [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] result = function(*args, **kwargs) [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return func(*args, **kwargs) [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] raise e [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] nwinfo = self.network_api.allocate_for_instance( [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 639.727947] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] created_port_ids = self._update_ports_for_instance( [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] with excutils.save_and_reraise_exception(): [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self.force_reraise() [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] raise self.value [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] updated_port = self._update_port( [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 639.728390] 
env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] _ensure_no_port_binding_failure(port) [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 639.728390] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] raise exception.PortBindingFailed(port_id=port['id']) [ 639.728711] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. [ 639.728711] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] [ 639.728711] env[62619]: INFO nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Terminating instance [ 639.732629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 639.732629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquired lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 639.732629] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 639.965348] env[62619]: INFO nova.compute.manager [-] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Took 1.03 seconds to deallocate network for instance. 
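Each of the tracebacks above bottoms out in _ensure_no_port_binding_failure (nova/network/neutron.py:294), which inspects the port Neutron returned and raises PortBindingFailed when the binding did not take. A simplified sketch of that kind of check; the binding:vif_type == 'binding_failed' test is an assumption based on the usual Neutron convention and is not confirmed by this log:

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                f"Binding failed for port {port_id}, please check neutron "
                "logs for more information.")

    def ensure_no_port_binding_failure(port):
        # Assumed check: Neutron marks a failed binding with
        # binding:vif_type == 'binding_failed'.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    port = {"id": "037f72ab-0bda-4c8e-abf0-a84b1d9d5817",
            "binding:vif_type": "binding_failed"}
    try:
        ensure_no_port_binding_failure(port)
    except PortBindingFailed as exc:
        print(exc)  # the same message the compute manager logs above
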
[ 639.967891] env[62619]: DEBUG nova.compute.claims [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 639.968091] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 640.058343] env[62619]: DEBUG nova.scheduler.client.report [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 640.255959] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 640.319205] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 640.328097] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.563899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.089s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 640.564608] env[62619]: ERROR nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. 
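The inventory snapshot above is what the resource tracker reports to Placement for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Schedulable capacity per resource class follows the standard Placement formula (total - reserved) * allocation_ratio; min_unit, max_unit and step_size only constrain individual allocations and are ignored in this small check:

    def effective_capacity(inventory):
        # Capacity Placement can allocate against: (total - reserved) * allocation_ratio.
        return {rc: (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
                for rc, inv in inventory.items()}

    # Values copied from the inventory data logged above.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }
    print(effective_capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
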
[ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Traceback (most recent call last): [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self.driver.spawn(context, instance, image_meta, [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self._vmops.spawn(context, instance, image_meta, injected_files, [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] vm_ref = self.build_virtual_machine(instance, [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] vif_infos = vmwarevif.get_vif_info(self._session, [ 640.564608] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] for vif in network_info: [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return self._sync_wrapper(fn, *args, **kwargs) [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self.wait() [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self[:] = self._gt.wait() [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return self._exit_event.wait() [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] result = hub.switch() [ 640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
640.565706] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return self.greenlet.switch() [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] result = function(*args, **kwargs) [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] return func(*args, **kwargs) [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] raise e [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] nwinfo = self.network_api.allocate_for_instance( [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] created_port_ids = self._update_ports_for_instance( [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] with excutils.save_and_reraise_exception(): [ 640.566085] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] self.force_reraise() [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] raise self.value [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] updated_port = self._update_port( [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] _ensure_no_port_binding_failure(port) [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] raise exception.PortBindingFailed(port_id=port['id']) [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] nova.exception.PortBindingFailed: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. [ 640.566400] env[62619]: ERROR nova.compute.manager [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] [ 640.566668] env[62619]: DEBUG nova.compute.utils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 640.566668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.130s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 640.572127] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Build of instance b79f2461-2b0f-4427-abb8-7a3a192e6230 was re-scheduled: Binding failed for port cfc22c12-4455-44de-8005-6c8a50fac191, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 640.572127] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 640.572127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquiring lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.572127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Acquired lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.572374] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 640.783338] env[62619]: DEBUG nova.compute.manager [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Received event network-changed-037f72ab-0bda-4c8e-abf0-a84b1d9d5817 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 640.783538] env[62619]: DEBUG nova.compute.manager [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Refreshing instance network info cache due to event network-changed-037f72ab-0bda-4c8e-abf0-a84b1d9d5817. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 640.783726] env[62619]: DEBUG oslo_concurrency.lockutils [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] Acquiring lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 640.830593] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Releasing lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 640.831048] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 640.831248] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 640.831588] env[62619]: DEBUG oslo_concurrency.lockutils [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] Acquired lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 640.831730] env[62619]: DEBUG nova.network.neutron [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Refreshing network info cache for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 640.833432] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae18d94e-1e68-4284-bb0e-96caf2d39830 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.852873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f6b0f2d-ce72-49ba-a204-de150a1d4df1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 640.876435] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 6f32ec6e-af29-4c0e-8f8b-708cbd1af474 could not be found. [ 640.879021] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 640.879021] env[62619]: INFO nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Took 0.05 seconds to destroy the instance on the hypervisor. [ 640.879021] env[62619]: DEBUG oslo.service.loopingcall [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 640.879021] env[62619]: DEBUG nova.compute.manager [-] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 640.879021] env[62619]: DEBUG nova.network.neutron [-] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 640.880070] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.901028] env[62619]: DEBUG nova.network.neutron [-] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.091478] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.099815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "c9c375f1-dd7f-43fb-acf6-45e766a8333d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.099815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "c9c375f1-dd7f-43fb-acf6-45e766a8333d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 641.191674] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.352264] env[62619]: DEBUG nova.network.neutron [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.384396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Releasing lock "refresh_cache-79a94ed1-1139-4194-8091-00b7b1562330" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.384566] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 641.385280] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.385280] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.385413] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.385674] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.385883] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.386528] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.386528] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 641.386528] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 641.403340] env[62619]: DEBUG nova.network.neutron [-] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.415593] env[62619]: DEBUG nova.network.neutron [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 641.423162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60eae532-92f6-4471-8d19-c117691505ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.434020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b566290-c752-4182-b898-a7c8b0acfc28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.462851] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b420c02-caff-42b0-aa84-6227f499fe9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.473296] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a0c41b32-86be-4803-b0f7-dbf211cad02f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 641.488522] env[62619]: DEBUG nova.compute.provider_tree [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 641.695308] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Releasing lock "refresh_cache-b79f2461-2b0f-4427-abb8-7a3a192e6230" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.695308] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 641.695308] env[62619]: DEBUG nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 641.695308] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 641.720854] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 641.891607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.907274] env[62619]: INFO nova.compute.manager [-] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Took 1.03 seconds to deallocate network for instance. [ 641.909583] env[62619]: DEBUG nova.compute.claims [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 641.909761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 641.918317] env[62619]: DEBUG oslo_concurrency.lockutils [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] Releasing lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 641.918537] env[62619]: DEBUG nova.compute.manager [req-adeab61a-fd2b-44b6-9905-e9a4cbb7f37d req-405eaae2-711b-440a-a0a5-d91046316493 service nova] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Received event network-vif-deleted-037f72ab-0bda-4c8e-abf0-a84b1d9d5817 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 641.991790] env[62619]: DEBUG nova.scheduler.client.report [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 
'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 642.222956] env[62619]: DEBUG nova.network.neutron [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 642.497989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.931s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 642.498559] env[62619]: ERROR nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Traceback (most recent call last): [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self.driver.spawn(context, instance, image_meta, [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self._vmops.spawn(context, instance, image_meta, injected_files, [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] vm_ref = self.build_virtual_machine(instance, [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] vif_infos = vmwarevif.get_vif_info(self._session, [ 642.498559] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] for vif in network_info: [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 
642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] return self._sync_wrapper(fn, *args, **kwargs) [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self.wait() [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self[:] = self._gt.wait() [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] return self._exit_event.wait() [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] current.throw(*self._exc) [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 642.498899] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] result = function(*args, **kwargs) [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] return func(*args, **kwargs) [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] raise e [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] nwinfo = self.network_api.allocate_for_instance( [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] created_port_ids = self._update_ports_for_instance( [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] with excutils.save_and_reraise_exception(): [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 
642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] self.force_reraise() [ 642.499270] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] raise self.value [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] updated_port = self._update_port( [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] _ensure_no_port_binding_failure(port) [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] raise exception.PortBindingFailed(port_id=port['id']) [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] nova.exception.PortBindingFailed: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. [ 642.499614] env[62619]: ERROR nova.compute.manager [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] [ 642.499614] env[62619]: DEBUG nova.compute.utils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 642.500717] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.232s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 642.502152] env[62619]: INFO nova.compute.claims [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 642.505141] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Build of instance ee06b107-4352-4491-b9bb-7faa7ccb5571 was re-scheduled: Binding failed for port 38a8e5b2-bc03-4720-9b2d-6905bfedbca5, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 642.505487] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 642.505744] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquiring lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 642.505894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Acquired lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 642.506067] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 642.726938] env[62619]: INFO nova.compute.manager [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] [instance: b79f2461-2b0f-4427-abb8-7a3a192e6230] Took 1.03 seconds to deallocate network for instance. [ 643.033803] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.111994] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 643.614427] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Releasing lock "refresh_cache-ee06b107-4352-4491-b9bb-7faa7ccb5571" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 643.614669] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 643.615538] env[62619]: DEBUG nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 643.615756] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 643.633274] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 643.756645] env[62619]: INFO nova.scheduler.client.report [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 tempest-ServerRescueTestJSON-484164136-project-member] Deleted allocations for instance b79f2461-2b0f-4427-abb8-7a3a192e6230 [ 643.890175] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b16c096b-5c1a-4550-82af-b117ef3e83a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.898897] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55300d54-dc38-4666-9d30-7f15681e5cc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.929162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fe3fbc4-e231-4abd-b190-cd1475d1b23e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.937470] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce06a3cb-fb3d-4235-9cb3-bf74eb665a21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 643.953686] env[62619]: DEBUG nova.compute.provider_tree [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 644.136888] env[62619]: DEBUG nova.network.neutron [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 644.267753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eae585f0-15bc-4aa4-8d27-a983e384532e tempest-ServerRescueTestJSON-484164136 
tempest-ServerRescueTestJSON-484164136-project-member] Lock "b79f2461-2b0f-4427-abb8-7a3a192e6230" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 74.477s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.457259] env[62619]: DEBUG nova.scheduler.client.report [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 644.639618] env[62619]: INFO nova.compute.manager [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] [instance: ee06b107-4352-4491-b9bb-7faa7ccb5571] Took 1.02 seconds to deallocate network for instance. [ 644.771638] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 644.962743] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.462s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 644.963363] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 644.966241] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.299s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 644.967697] env[62619]: INFO nova.compute.claims [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 645.290413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 645.472515] env[62619]: DEBUG nova.compute.utils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 645.473945] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 645.474130] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 645.552829] env[62619]: DEBUG nova.policy [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69221dd949b54fd299922c94ccfc14b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e963f91b5bd4691ba0108dd80da3dff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 645.665770] env[62619]: INFO nova.scheduler.client.report [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Deleted allocations for instance ee06b107-4352-4491-b9bb-7faa7ccb5571 [ 645.979502] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 646.076305] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Successfully created port: 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 646.179816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-523c9196-9c8c-4a73-8190-23ed593f11f6 tempest-InstanceActionsNegativeTestJSON-176149514 tempest-InstanceActionsNegativeTestJSON-176149514-project-member] Lock "ee06b107-4352-4491-b9bb-7faa7ccb5571" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 68.641s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 646.360268] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a7dd239-13d4-49d5-b3d0-d29afaaac000 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.368552] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bfbeba7-9b1c-4e08-8ff3-f79763a536ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.400723] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a9ccdd-d3d1-42e5-9e12-476f49b7025e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.413807] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14cad3bc-d155-4adb-96dc-dd623187a7eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 646.428135] env[62619]: DEBUG nova.compute.provider_tree [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 646.683987] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 646.935314] env[62619]: DEBUG nova.scheduler.client.report [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 646.991688] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 647.027210] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 647.030823] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 647.030823] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 647.030823] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 647.030823] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 647.030823] env[62619]: DEBUG nova.virt.hardware [None 
req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 647.030823] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 647.031068] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 647.031068] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 647.031068] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 647.031068] env[62619]: DEBUG nova.virt.hardware [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 647.031068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed56201-5bb9-4530-bece-f594e6c8e3c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.038521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4992b4e9-f3fa-43f0-a7d7-7ef8e634dc80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 647.044682] env[62619]: DEBUG nova.compute.manager [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Received event network-changed-15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 647.044881] env[62619]: DEBUG nova.compute.manager [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Refreshing instance network info cache due to event network-changed-15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 647.046876] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] Acquiring lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.046876] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] Acquired lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 647.046876] env[62619]: DEBUG nova.network.neutron [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Refreshing network info cache for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 647.216182] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 647.292775] env[62619]: ERROR nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. 
[ 647.292775] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.292775] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.292775] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.292775] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.292775] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.292775] env[62619]: ERROR nova.compute.manager raise self.value [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.292775] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 647.292775] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.292775] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 647.294547] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.294547] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 647.294547] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. 
[ 647.294547] env[62619]: ERROR nova.compute.manager [ 647.294547] env[62619]: Traceback (most recent call last): [ 647.294547] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 647.294547] env[62619]: listener.cb(fileno) [ 647.294547] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.294547] env[62619]: result = function(*args, **kwargs) [ 647.294547] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.294547] env[62619]: return func(*args, **kwargs) [ 647.294547] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.294547] env[62619]: raise e [ 647.294547] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.294547] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 647.294547] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.294547] env[62619]: created_port_ids = self._update_ports_for_instance( [ 647.294547] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.294547] env[62619]: with excutils.save_and_reraise_exception(): [ 647.294547] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.294547] env[62619]: self.force_reraise() [ 647.294547] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.294547] env[62619]: raise self.value [ 647.294547] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.294547] env[62619]: updated_port = self._update_port( [ 647.294547] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.294547] env[62619]: _ensure_no_port_binding_failure(port) [ 647.294547] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.294547] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 647.295434] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. [ 647.295434] env[62619]: Removing descriptor: 17 [ 647.295434] env[62619]: ERROR nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. 
[ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Traceback (most recent call last): [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] yield resources [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self.driver.spawn(context, instance, image_meta, [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self._vmops.spawn(context, instance, image_meta, injected_files, [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 647.295434] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] vm_ref = self.build_virtual_machine(instance, [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] vif_infos = vmwarevif.get_vif_info(self._session, [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] for vif in network_info: [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return self._sync_wrapper(fn, *args, **kwargs) [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self.wait() [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self[:] = self._gt.wait() [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return self._exit_event.wait() [ 647.295747] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 647.296166] env[62619]: ERROR 
nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] result = hub.switch() [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return self.greenlet.switch() [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] result = function(*args, **kwargs) [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return func(*args, **kwargs) [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] raise e [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] nwinfo = self.network_api.allocate_for_instance( [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 647.296166] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] created_port_ids = self._update_ports_for_instance( [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] with excutils.save_and_reraise_exception(): [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self.force_reraise() [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] raise self.value [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] updated_port = self._update_port( [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 647.296818] 
env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] _ensure_no_port_binding_failure(port) [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 647.296818] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] raise exception.PortBindingFailed(port_id=port['id']) [ 647.297201] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. [ 647.297201] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] [ 647.297201] env[62619]: INFO nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Terminating instance [ 647.297201] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 647.441245] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.475s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 647.441821] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 647.444529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 18.939s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 647.444724] env[62619]: DEBUG nova.objects.instance [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 647.712698] env[62619]: DEBUG nova.network.neutron [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 647.896506] env[62619]: DEBUG nova.network.neutron [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 647.951024] env[62619]: DEBUG nova.compute.utils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 647.953409] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 647.953573] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 648.010319] env[62619]: DEBUG nova.policy [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '69221dd949b54fd299922c94ccfc14b4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '6e963f91b5bd4691ba0108dd80da3dff', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 648.399684] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e303e60-c186-4fbf-af10-3a2778a2c985 req-36b2d369-9b84-494d-98d1-21a75116eec6 service nova] Releasing lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 648.400410] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquired lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 648.400743] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 648.458230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-11731cb2-548c-4d40-976f-35102bd8f235 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 
1.012s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 648.458397] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 648.461558] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 19.457s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 648.461645] env[62619]: DEBUG nova.objects.instance [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 648.530841] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Successfully created port: 8edd1ac1-581d-4f5a-9073-1afdc0ea3484 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 648.931131] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.071064] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 649.074772] env[62619]: DEBUG nova.compute.manager [req-135e981e-6c6b-447f-9394-97c2a3cf6c05 req-71b8cfde-d764-4605-8267-e3d358a78cc1 service nova] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Received event network-vif-deleted-15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 649.478817] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 649.481532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f41d0995-ad15-4302-94d8-e4a073eb99b0 tempest-ServersAdmin275Test-655410159 tempest-ServersAdmin275Test-655410159-project-admin] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.020s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 649.482721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 19.384s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 649.482931] env[62619]: DEBUG nova.objects.instance [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lazy-loading 'resources' on Instance uuid d7404720-7cf0-41bf-a882-2cb6db2253bc {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 649.521233] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 649.521233] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 649.521233] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 649.521233] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 649.521475] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 
649.521475] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 649.521475] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 649.521475] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 649.522526] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 649.522526] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 649.522526] env[62619]: DEBUG nova.virt.hardware [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 649.523824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c1ab6a3-2806-40f5-ac68-73e6e93024e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.538904] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bfe1e7-cde1-4774-8743-ca72a334c188 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.574312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Releasing lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 649.574741] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 649.574932] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 649.575249] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-007d9859-9e3b-4fb8-9ecf-af2ccafd1756 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.585706] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93ff3c1-d10c-40bc-ba0e-becab2987e79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 649.611624] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e145f3d4-fb6e-4c14-abdf-d85596fb0218 could not be found. [ 649.611875] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 649.612094] env[62619]: INFO nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Took 0.04 seconds to destroy the instance on the hypervisor. [ 649.612353] env[62619]: DEBUG oslo.service.loopingcall [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 649.612579] env[62619]: DEBUG nova.compute.manager [-] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 649.612670] env[62619]: DEBUG nova.network.neutron [-] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 649.640616] env[62619]: DEBUG nova.network.neutron [-] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 649.682605] env[62619]: ERROR nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. 
[ 649.682605] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.682605] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.682605] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.682605] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.682605] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.682605] env[62619]: ERROR nova.compute.manager raise self.value [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.682605] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 649.682605] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.682605] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 649.683084] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.683084] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 649.683084] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. 
[ 649.683084] env[62619]: ERROR nova.compute.manager [ 649.683084] env[62619]: Traceback (most recent call last): [ 649.683084] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 649.683084] env[62619]: listener.cb(fileno) [ 649.683084] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.683084] env[62619]: result = function(*args, **kwargs) [ 649.683084] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.683084] env[62619]: return func(*args, **kwargs) [ 649.683084] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.683084] env[62619]: raise e [ 649.683084] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.683084] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 649.683084] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.683084] env[62619]: created_port_ids = self._update_ports_for_instance( [ 649.683084] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.683084] env[62619]: with excutils.save_and_reraise_exception(): [ 649.683084] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.683084] env[62619]: self.force_reraise() [ 649.683084] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.683084] env[62619]: raise self.value [ 649.683084] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.683084] env[62619]: updated_port = self._update_port( [ 649.683084] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.683084] env[62619]: _ensure_no_port_binding_failure(port) [ 649.683084] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.683084] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 649.683938] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. [ 649.683938] env[62619]: Removing descriptor: 17 [ 649.683938] env[62619]: ERROR nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. 
[ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Traceback (most recent call last): [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] yield resources [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self.driver.spawn(context, instance, image_meta, [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 649.683938] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] vm_ref = self.build_virtual_machine(instance, [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] vif_infos = vmwarevif.get_vif_info(self._session, [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] for vif in network_info: [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return self._sync_wrapper(fn, *args, **kwargs) [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self.wait() [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self[:] = self._gt.wait() [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return self._exit_event.wait() [ 649.684284] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 649.684690] env[62619]: ERROR 
nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] result = hub.switch() [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return self.greenlet.switch() [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] result = function(*args, **kwargs) [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return func(*args, **kwargs) [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] raise e [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] nwinfo = self.network_api.allocate_for_instance( [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 649.684690] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] created_port_ids = self._update_ports_for_instance( [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] with excutils.save_and_reraise_exception(): [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self.force_reraise() [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] raise self.value [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] updated_port = self._update_port( [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 649.685046] 
env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] _ensure_no_port_binding_failure(port) [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 649.685046] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] raise exception.PortBindingFailed(port_id=port['id']) [ 649.685399] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. [ 649.685399] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] [ 649.685399] env[62619]: INFO nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Terminating instance [ 649.686196] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 649.686359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquired lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 649.686786] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 650.141720] env[62619]: DEBUG nova.network.neutron [-] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.223105] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.376482] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 650.415011] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dc93d63-8dd6-4fb2-a69b-d4309a7a2614 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.423631] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1a996a-3ef3-4cf2-8514-89a80bdc5450 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.455976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08d7d37b-02f4-4171-b31d-0a7f77141737 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.465180] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10f2bb83-4442-49db-b244-5f6695112efa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.480718] env[62619]: DEBUG nova.compute.provider_tree [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 650.644443] env[62619]: INFO nova.compute.manager [-] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Took 1.03 seconds to deallocate network for instance. 
[ 650.646932] env[62619]: DEBUG nova.compute.claims [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 650.647027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.725690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquiring lock "8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 650.725834] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Lock "8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 650.879671] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Releasing lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 650.880149] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 650.880676] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 650.880676] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-758d2aa8-0c7f-4a66-8ac4-c144e332f190 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.892414] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-158d16cf-a6ec-405b-9649-8e1557ce68c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 650.920206] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8 could not be found. [ 650.920436] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 650.920619] env[62619]: INFO nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Took 0.04 seconds to destroy the instance on the hypervisor. [ 650.920864] env[62619]: DEBUG oslo.service.loopingcall [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 650.921436] env[62619]: DEBUG nova.compute.manager [-] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 650.921436] env[62619]: DEBUG nova.network.neutron [-] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 650.938307] env[62619]: DEBUG nova.network.neutron [-] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 650.984127] env[62619]: DEBUG nova.scheduler.client.report [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 651.102480] env[62619]: DEBUG nova.compute.manager [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Received event network-changed-8edd1ac1-581d-4f5a-9073-1afdc0ea3484 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 651.102767] env[62619]: DEBUG nova.compute.manager [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Refreshing instance network info cache due to event network-changed-8edd1ac1-581d-4f5a-9073-1afdc0ea3484. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 651.102867] env[62619]: DEBUG oslo_concurrency.lockutils [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] Acquiring lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 651.103043] env[62619]: DEBUG oslo_concurrency.lockutils [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] Acquired lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 651.104059] env[62619]: DEBUG nova.network.neutron [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Refreshing network info cache for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 651.440740] env[62619]: DEBUG nova.network.neutron [-] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.490407] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.008s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 651.492709] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 20.174s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 651.522528] env[62619]: INFO nova.scheduler.client.report [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Deleted allocations for instance d7404720-7cf0-41bf-a882-2cb6db2253bc [ 651.638978] env[62619]: DEBUG nova.network.neutron [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 651.762385] env[62619]: DEBUG nova.network.neutron [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 651.943245] env[62619]: INFO nova.compute.manager [-] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Took 1.02 seconds to deallocate network for instance. [ 651.946805] env[62619]: DEBUG nova.compute.claims [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 651.946927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.032873] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2e94889b-a862-4d1b-be7f-8fe9fde0784b tempest-ServersAaction247Test-661174374 tempest-ServersAaction247Test-661174374-project-member] Lock "d7404720-7cf0-41bf-a882-2cb6db2253bc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 25.922s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 652.265189] env[62619]: DEBUG oslo_concurrency.lockutils [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] Releasing lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 652.265475] env[62619]: DEBUG nova.compute.manager [req-04991678-d507-41c8-93c4-35642c15537a req-933a1777-47d2-471f-b0c3-e59969b7c3fd service nova] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Received event network-vif-deleted-8edd1ac1-581d-4f5a-9073-1afdc0ea3484 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 652.360213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81961923-35bf-486c-9eb7-ff7ad6753775 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.370591] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f8ee5905-1b4f-489a-8833-1c44171df635 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.403750] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb5f6c09-fca2-4a44-af7b-eb868f566551 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.409947] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eecf8778-08fc-4cb0-8d0e-d608a1cbc15b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 652.429240] env[62619]: DEBUG nova.compute.provider_tree [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 652.654934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquiring lock "695dfaa6-8e34-4426-b025-6ce7e4e3174c" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 652.654934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Lock "695dfaa6-8e34-4426-b025-6ce7e4e3174c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 652.933166] env[62619]: DEBUG nova.scheduler.client.report [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 653.438415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.946s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 653.439062] env[62619]: ERROR nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 
ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Traceback (most recent call last): [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self.driver.spawn(context, instance, image_meta, [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] vm_ref = self.build_virtual_machine(instance, [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] vif_infos = vmwarevif.get_vif_info(self._session, [ 653.439062] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] for vif in network_info: [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return self._sync_wrapper(fn, *args, **kwargs) [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self.wait() [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self[:] = self._gt.wait() [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return self._exit_event.wait() [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] result = hub.switch() [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 653.439411] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return self.greenlet.switch() [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] result = function(*args, **kwargs) [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] return func(*args, **kwargs) [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] raise e [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] nwinfo = self.network_api.allocate_for_instance( [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] created_port_ids = self._update_ports_for_instance( [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] with excutils.save_and_reraise_exception(): [ 653.439767] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] self.force_reraise() [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] raise self.value [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] updated_port = self._update_port( [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] _ensure_no_port_binding_failure(port) [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 
972e1187-09ee-4703-a3bc-7eb213a5c52e] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] raise exception.PortBindingFailed(port_id=port['id']) [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] nova.exception.PortBindingFailed: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. [ 653.440167] env[62619]: ERROR nova.compute.manager [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] [ 653.440473] env[62619]: DEBUG nova.compute.utils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 653.440990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 21.853s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 653.442585] env[62619]: INFO nova.compute.claims [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 653.445749] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Build of instance 972e1187-09ee-4703-a3bc-7eb213a5c52e was re-scheduled: Binding failed for port ea6ce5f8-690a-4e8e-8143-d1b062fe542b, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 653.446219] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 653.446445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquiring lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 653.446589] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Acquired lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 653.446745] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 654.112152] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.229124] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.731686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Releasing lock "refresh_cache-972e1187-09ee-4703-a3bc-7eb213a5c52e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 654.731912] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 654.732103] env[62619]: DEBUG nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 654.732266] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 654.742744] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bac560-4e31-43ef-8d0b-4c6596b75621 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.750354] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79a8338e-4c42-490a-a549-58845f735a88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.756367] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 654.781350] env[62619]: DEBUG nova.network.neutron [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 654.782864] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d6ff686-6eb4-4a3b-9c8e-611462c7cb7c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.791559] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae56bf0f-2396-4bcb-8a4d-840bb594fdc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 654.805631] env[62619]: DEBUG nova.compute.provider_tree [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 655.286512] env[62619]: INFO nova.compute.manager [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] [instance: 972e1187-09ee-4703-a3bc-7eb213a5c52e] Took 0.55 seconds to deallocate network for instance. 
[ 655.308624] env[62619]: DEBUG nova.scheduler.client.report [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 655.813193] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 655.813730] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 655.816396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 23.009s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 655.816611] env[62619]: DEBUG nova.objects.instance [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lazy-loading 'resources' on Instance uuid b7c425a1-a80d-4a62-a71f-d14fdf638cf7 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 656.318241] env[62619]: INFO nova.scheduler.client.report [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Deleted allocations for instance 972e1187-09ee-4703-a3bc-7eb213a5c52e [ 656.325410] env[62619]: DEBUG nova.compute.utils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 656.330493] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 656.330647] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 656.412808] env[62619]: DEBUG nova.policy [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3041343376d4f2fad14577d5c412b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4270942193cd4a9aa397784368b9ae64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 656.653500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03fad7a5-96d0-4ab7-9c98-7c7d88ff2a9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.661245] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d9f7487-b089-401f-86d3-0ee2a39eefc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.693178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adf45166-ee83-4169-9955-5e14fb9be84c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.699713] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-830bdd76-7761-466a-979b-39598875a51f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 656.713347] env[62619]: DEBUG nova.compute.provider_tree [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 656.821393] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Successfully created port: 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 656.831207] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 656.834162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-019cd5f2-d466-47e3-84ef-96427387228f tempest-ServerGroupTestJSON-193793233 tempest-ServerGroupTestJSON-193793233-project-member] Lock "972e1187-09ee-4703-a3bc-7eb213a5c52e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 75.294s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.218481] env[62619]: DEBUG nova.scheduler.client.report [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 657.342089] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 657.724592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.906s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 657.724592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.710s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 657.726543] env[62619]: DEBUG nova.objects.instance [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lazy-loading 'resources' on Instance uuid 79a94ed1-1139-4194-8091-00b7b1562330 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 657.753098] env[62619]: INFO nova.scheduler.client.report [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Deleted allocations for instance b7c425a1-a80d-4a62-a71f-d14fdf638cf7 [ 657.784370] env[62619]: DEBUG nova.compute.manager [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Received event network-changed-4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 657.784370] env[62619]: DEBUG nova.compute.manager [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f 
req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Refreshing instance network info cache due to event network-changed-4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 657.785031] env[62619]: DEBUG oslo_concurrency.lockutils [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] Acquiring lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 657.785031] env[62619]: DEBUG oslo_concurrency.lockutils [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] Acquired lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 657.785031] env[62619]: DEBUG nova.network.neutron [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Refreshing network info cache for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 657.851685] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 657.874099] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 657.883055] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=<?>,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=<?>,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=<?>,size=21318656,status='active',tags=<?>,updated_at=2024-10-25T16:57:31Z,virtual_size=<?>,visibility=<?>), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 657.883517] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 657.883679] env[62619]: DEBUG 
nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 657.883865] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 657.884228] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 657.884387] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 657.884796] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 657.884796] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 657.884922] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 657.885124] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 657.885264] env[62619]: DEBUG nova.virt.hardware [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 657.888190] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4040197f-fbe5-4384-a0ea-648e49665acc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.897208] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e5da97-056c-476c-8f1c-a97ef65422e2 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 657.990974] env[62619]: ERROR nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. [ 657.990974] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.990974] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 657.990974] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 657.990974] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.990974] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.990974] env[62619]: ERROR nova.compute.manager raise self.value [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 657.990974] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 657.990974] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.990974] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 657.991696] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.991696] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 657.991696] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. 
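
The bottom frames of the traceback above show where this exception originates: _update_port() hands the refreshed port to _ensure_no_port_binding_failure(), which raises PortBindingFailed. A minimal, self-contained sketch of that kind of guard follows; the exact Neutron field consulted ('binding:vif_type' equal to 'binding_failed') is an assumption about the port payload, not something this log confirms.

    # Sketch of the port-binding guard implied by the traceback above.
    # Assumption: Neutron reports a failed binding as
    # port['binding:vif_type'] == 'binding_failed'.

    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(f"Binding failed for port {port_id}, "
                             "please check neutron logs for more information.")
            self.port_id = port_id


    def ensure_no_port_binding_failure(port: dict) -> None:
        """Raise PortBindingFailed if the port's binding failed."""
        if port.get('binding:vif_type') == 'binding_failed':
            raise PortBindingFailed(port_id=port['id'])


    if __name__ == '__main__':
        bad_port = {'id': '4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d',
                    'binding:vif_type': 'binding_failed'}
        try:
            ensure_no_port_binding_failure(bad_port)
        except PortBindingFailed as exc:
            print(exc)  # same message as the log line above
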
[ 657.991696] env[62619]: ERROR nova.compute.manager [ 657.991696] env[62619]: Traceback (most recent call last): [ 657.991696] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 657.991696] env[62619]: listener.cb(fileno) [ 657.991696] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.991696] env[62619]: result = function(*args, **kwargs) [ 657.991696] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 657.991696] env[62619]: return func(*args, **kwargs) [ 657.991696] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.991696] env[62619]: raise e [ 657.991696] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.991696] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 657.991696] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 657.991696] env[62619]: created_port_ids = self._update_ports_for_instance( [ 657.991696] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 657.991696] env[62619]: with excutils.save_and_reraise_exception(): [ 657.991696] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.991696] env[62619]: self.force_reraise() [ 657.991696] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.991696] env[62619]: raise self.value [ 657.991696] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 657.991696] env[62619]: updated_port = self._update_port( [ 657.991696] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.991696] env[62619]: _ensure_no_port_binding_failure(port) [ 657.991696] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.991696] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 657.993143] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. [ 657.993143] env[62619]: Removing descriptor: 17 [ 657.993143] env[62619]: ERROR nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. 
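
The traceback that follows repeats the allocation failure; the later entries for this instance then record the cleanup path (terminating the instance, deallocating its network, aborting the resource claim). A rough, self-contained sketch of that failure-handling shape follows; the function names are illustrative stand-ins, not nova's actual API.

    # Simplified failure-handling shape suggested by the surrounding entries:
    # spawn fails, the error is logged, network resources are released and
    # the resource claim is aborted. Names here are illustrative only.

    def build_instance(spawn, deallocate_network, abort_claim, instance):
        try:
            spawn(instance)
            return 'ACTIVE'
        except Exception as exc:            # e.g. a port binding failure
            print(f"Instance failed to spawn: {exc}")
            deallocate_network(instance)    # "Deallocating network for instance"
            abort_claim(instance)           # "Aborting claim"
            return 'ERROR'


    if __name__ == '__main__':
        def failing_spawn(instance):
            raise RuntimeError("Binding failed for port 4e2f7ab7-...")

        print(build_instance(
            failing_spawn,
            deallocate_network=lambda i: print("Deallocating network for instance"),
            abort_claim=lambda i: print("Aborting claim"),
            instance='7d98f551-5dd5-40b4-bfb7-0d54f69e29bf'))
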
[ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Traceback (most recent call last): [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] yield resources [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self.driver.spawn(context, instance, image_meta, [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 657.993143] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] vm_ref = self.build_virtual_machine(instance, [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] for vif in network_info: [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return self._sync_wrapper(fn, *args, **kwargs) [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self.wait() [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self[:] = self._gt.wait() [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return self._exit_event.wait() [ 657.993430] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 657.993733] env[62619]: ERROR 
nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] result = hub.switch() [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return self.greenlet.switch() [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] result = function(*args, **kwargs) [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return func(*args, **kwargs) [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] raise e [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] nwinfo = self.network_api.allocate_for_instance( [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 657.993733] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] created_port_ids = self._update_ports_for_instance( [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] with excutils.save_and_reraise_exception(): [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self.force_reraise() [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] raise self.value [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] updated_port = self._update_port( [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 657.994037] 
env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] _ensure_no_port_binding_failure(port) [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 657.994037] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] raise exception.PortBindingFailed(port_id=port['id']) [ 657.994372] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. [ 657.994372] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] [ 657.994372] env[62619]: INFO nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Terminating instance [ 657.994372] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 658.260631] env[62619]: DEBUG oslo_concurrency.lockutils [None req-86c79606-d529-425c-9db8-c2cd4b8f9470 tempest-ServerShowV254Test-44959365 tempest-ServerShowV254Test-44959365-project-member] Lock "b7c425a1-a80d-4a62-a71f-d14fdf638cf7" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.144s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 658.309927] env[62619]: DEBUG nova.network.neutron [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 658.445956] env[62619]: DEBUG nova.network.neutron [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 658.590555] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a412a2-e3a5-4813-b25f-b62f9763c755 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.599635] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dee75908-51bc-40e5-a730-7732b285f198 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.638600] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-070f4883-ea70-456f-baed-24bf4899d406 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.646609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6305011c-015f-45c5-b779-57979ca55d99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 658.660237] env[62619]: DEBUG nova.compute.provider_tree [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 658.948310] env[62619]: DEBUG oslo_concurrency.lockutils [req-4ecf8d1b-f01e-4d36-ae7a-03d67e0d4c4f req-8a67baf7-6c3a-4b9c-a697-7082f69a2a5d service nova] Releasing lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 658.949453] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 658.950674] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 659.163677] env[62619]: DEBUG nova.scheduler.client.report [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 659.476550] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 659.584563] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 659.669952] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.945s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 659.672289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.576s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.712977] env[62619]: INFO nova.scheduler.client.report [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Deleted allocations for instance 79a94ed1-1139-4194-8091-00b7b1562330 [ 659.805697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 659.806289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 659.808058] env[62619]: DEBUG nova.compute.manager [req-79ff64cc-6d9f-4e45-873f-d4e03eee6bc2 req-d20e2003-c9b5-4b8e-ab51-14e236c7275b service nova] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Received event network-vif-deleted-4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 660.088807] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 
tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 660.089432] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 660.089663] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 660.090044] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d056157c-3e3b-4f61-9861-345b1d4d7624 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.099931] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d39193a-4258-4f31-97f1-a05e997247f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.123533] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf could not be found. [ 660.123747] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 660.124072] env[62619]: INFO nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Took 0.03 seconds to destroy the instance on the hypervisor. [ 660.124393] env[62619]: DEBUG oslo.service.loopingcall [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 660.124655] env[62619]: DEBUG nova.compute.manager [-] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 660.124780] env[62619]: DEBUG nova.network.neutron [-] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 660.227760] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b335eac8-4e7d-43c2-943b-af5816f38bd6 tempest-ServersAdmin275Test-1655766995 tempest-ServersAdmin275Test-1655766995-project-member] Lock "79a94ed1-1139-4194-8091-00b7b1562330" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 30.943s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 660.276116] env[62619]: DEBUG nova.network.neutron [-] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 660.562104] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6773a615-9059-4d2e-8ead-e6838624bf06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.570189] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8632c24b-ef6d-4c88-89e9-10e210980444 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.602079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcee5294-4bb3-449f-9d71-87ba5ef85910 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.609696] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7640096d-2777-4b7d-9f21-98c76d938068 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.623241] env[62619]: DEBUG nova.compute.provider_tree [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 660.778889] env[62619]: DEBUG nova.network.neutron [-] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.126592] env[62619]: DEBUG nova.scheduler.client.report [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 661.281690] env[62619]: INFO nova.compute.manager [-] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Took 1.16 seconds to deallocate network for instance. [ 661.286124] env[62619]: DEBUG nova.compute.claims [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 661.286322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 661.632964] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.961s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 661.633640] env[62619]: ERROR nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. 
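
The traceback that follows shows why the binding failure surfaces inside the VMware spawn path rather than at the allocation call itself: network_info is a deferred wrapper whose iteration blocks on the background allocation thread and re-raises whatever it raised. A simplified sketch of that pattern follows, using concurrent.futures instead of the eventlet green threads nova actually relies on; the class name is illustrative, not the real nova.network.model API.

    # Simplified deferred-result pattern visible in the traceback below:
    # allocation runs in the background, and its exception only surfaces
    # when the result is first iterated (here, by the virt driver).

    from concurrent.futures import ThreadPoolExecutor


    class DeferredNetworkInfo:
        def __init__(self, allocate_fn, *args, **kwargs):
            self._future = ThreadPoolExecutor(max_workers=1).submit(
                allocate_fn, *args, **kwargs)

        def __iter__(self):
            # Blocks on the background allocation and re-raises its
            # exception, so the error shows up in get_vif_info()'s loop.
            return iter(self._future.result())


    def failing_allocation():
        raise RuntimeError("Binding failed for port 758127a2-...")


    if __name__ == '__main__':
        network_info = DeferredNetworkInfo(failing_allocation)
        try:
            for vif in network_info:
                print(vif)
        except RuntimeError as exc:
            print("surfaced during spawn:", exc)
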
[ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Traceback (most recent call last): [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self.driver.spawn(context, instance, image_meta, [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] vm_ref = self.build_virtual_machine(instance, [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] vif_infos = vmwarevif.get_vif_info(self._session, [ 661.633640] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] for vif in network_info: [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return self._sync_wrapper(fn, *args, **kwargs) [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self.wait() [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self[:] = self._gt.wait() [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return self._exit_event.wait() [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] result = hub.switch() [ 661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
661.633977] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return self.greenlet.switch() [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] result = function(*args, **kwargs) [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] return func(*args, **kwargs) [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] raise e [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] nwinfo = self.network_api.allocate_for_instance( [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] created_port_ids = self._update_ports_for_instance( [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] with excutils.save_and_reraise_exception(): [ 661.634366] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] self.force_reraise() [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] raise self.value [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] updated_port = self._update_port( [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] _ensure_no_port_binding_failure(port) [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] raise exception.PortBindingFailed(port_id=port['id']) [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] nova.exception.PortBindingFailed: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. [ 661.634693] env[62619]: ERROR nova.compute.manager [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] [ 661.634988] env[62619]: DEBUG nova.compute.utils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 661.635716] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.438s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 661.637335] env[62619]: INFO nova.compute.claims [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 661.640219] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Build of instance 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc was re-scheduled: Binding failed for port 758127a2-1efd-462c-94e0-cf1ca05545ae, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 661.640655] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 661.640889] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquiring lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 661.641033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Acquired lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 661.641192] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 662.164980] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 662.266556] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 662.767913] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Releasing lock "refresh_cache-4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 662.768310] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 662.768492] env[62619]: DEBUG nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 662.768599] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 662.790626] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.069156] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8029db9b-c120-4be7-b492-5406f9d93a81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.078775] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f343c251-5077-4227-9286-3e6d3266c297 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.117307] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0328c33e-d664-4dba-b358-7aae717aac24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.121978] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483d4b76-c5f4-4ca4-b30a-7c012ddbe7d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.136296] env[62619]: DEBUG nova.compute.provider_tree [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 663.293527] env[62619]: DEBUG nova.network.neutron [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.639948] env[62619]: DEBUG nova.scheduler.client.report [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 663.800385] env[62619]: INFO nova.compute.manager [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] [instance: 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc] Took 1.03 seconds to deallocate network for instance. [ 664.147702] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.512s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 664.148931] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 664.154399] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.186s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.662875] env[62619]: DEBUG nova.compute.utils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 664.664399] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 664.665525] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 664.731964] env[62619]: DEBUG nova.policy [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '359ce2c1621c42229089e6e48d0e645f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'debbfd22f0504759b386c0d56a9320da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 664.827049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquiring lock "eda91c9e-886e-468e-b9eb-0435c1e94cd3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 664.830430] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Lock "eda91c9e-886e-468e-b9eb-0435c1e94cd3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 664.848539] env[62619]: INFO nova.scheduler.client.report [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Deleted allocations for instance 4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc [ 665.091029] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdd6e127-e208-41e3-a10d-1bc672026610 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.100021] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5167505-bdd7-4de3-893d-79205a6302e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.134537] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Successfully created port: ef14a719-827a-4b61-84cf-d6f4bf04ca6a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 665.137168] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e4ab2101-4b49-4759-bece-98b69f0f463c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.145536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ea7c9b3-4b99-4153-9562-1e9203a4a731 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 665.159982] env[62619]: DEBUG nova.compute.provider_tree [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 665.170344] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 665.357401] env[62619]: DEBUG oslo_concurrency.lockutils [None req-092ff824-0f8e-4d89-a4d6-b3b0c372388e tempest-ImagesOneServerTestJSON-480946824 tempest-ImagesOneServerTestJSON-480946824-project-member] Lock "4261dd1f-7a4c-4442-b7a8-ec3f3fdabefc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 81.936s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 665.663605] env[62619]: DEBUG nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 665.859727] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 666.103825] env[62619]: DEBUG nova.compute.manager [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Received event network-changed-ef14a719-827a-4b61-84cf-d6f4bf04ca6a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 666.104043] env[62619]: DEBUG nova.compute.manager [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Refreshing instance network info cache due to event network-changed-ef14a719-827a-4b61-84cf-d6f4bf04ca6a. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 666.104311] env[62619]: DEBUG oslo_concurrency.lockutils [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] Acquiring lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.104403] env[62619]: DEBUG oslo_concurrency.lockutils [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] Acquired lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.104560] env[62619]: DEBUG nova.network.neutron [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Refreshing network info cache for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 666.170772] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.016s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.171444] env[62619]: ERROR nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. 
[ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Traceback (most recent call last): [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self.driver.spawn(context, instance, image_meta, [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] vm_ref = self.build_virtual_machine(instance, [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.171444] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] for vif in network_info: [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] return self._sync_wrapper(fn, *args, **kwargs) [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self.wait() [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self[:] = self._gt.wait() [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] return self._exit_event.wait() [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] current.throw(*self._exc) [ 666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
666.171786] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] result = function(*args, **kwargs) [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] return func(*args, **kwargs) [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] raise e [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] nwinfo = self.network_api.allocate_for_instance( [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] created_port_ids = self._update_ports_for_instance( [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] with excutils.save_and_reraise_exception(): [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] self.force_reraise() [ 666.172157] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] raise self.value [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] updated_port = self._update_port( [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] _ensure_no_port_binding_failure(port) [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] raise exception.PortBindingFailed(port_id=port['id']) [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] nova.exception.PortBindingFailed: Binding failed for 
port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. [ 666.172475] env[62619]: ERROR nova.compute.manager [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] [ 666.172475] env[62619]: DEBUG nova.compute.utils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 666.173313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 24.282s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.173494] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 666.173636] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 666.173905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.264s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 666.177928] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Build of instance c4ada5ac-4da7-4ae8-a9d9-577a83dd0359 was re-scheduled: Binding failed for port f88f8e1d-28c6-463a-a3b1-e82451ce6b58, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 666.178371] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 666.178592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.178731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquired lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 666.178936] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 666.180394] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91cbce02-3071-4359-8458-b1faa7f90e5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.187355] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 666.195676] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ae4199-0b75-427c-8ea4-49f066afd40d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.210674] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c232f0d0-affa-4dd8-ba01-7088d45ba4ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.219771] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdaca1c2-92a7-4b52-b17c-396bd1f0856c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.226026] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 666.226561] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 666.226820] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 666.227104] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 666.227348] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 666.227686] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 666.228158] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 666.228428] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 666.228698] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 666.229030] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 666.229315] env[62619]: DEBUG nova.virt.hardware [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 666.230704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38300324-c54f-4b69-8457-e0ded4a73f27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.262536] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181275MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 666.262961] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 666.264446] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e3db004-133b-4abc-8c95-196898710ed1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 666.379264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} 
[ 666.425801] env[62619]: ERROR nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. [ 666.425801] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.425801] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.425801] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.425801] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.425801] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.425801] env[62619]: ERROR nova.compute.manager raise self.value [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.425801] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 666.425801] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.425801] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 666.426300] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.426300] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 666.426300] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. 
[ 666.426300] env[62619]: ERROR nova.compute.manager [ 666.426300] env[62619]: Traceback (most recent call last): [ 666.426300] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 666.426300] env[62619]: listener.cb(fileno) [ 666.426300] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.426300] env[62619]: result = function(*args, **kwargs) [ 666.426300] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.426300] env[62619]: return func(*args, **kwargs) [ 666.426300] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.426300] env[62619]: raise e [ 666.426300] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.426300] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 666.426300] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.426300] env[62619]: created_port_ids = self._update_ports_for_instance( [ 666.426300] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.426300] env[62619]: with excutils.save_and_reraise_exception(): [ 666.426300] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.426300] env[62619]: self.force_reraise() [ 666.426300] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.426300] env[62619]: raise self.value [ 666.426300] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.426300] env[62619]: updated_port = self._update_port( [ 666.426300] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.426300] env[62619]: _ensure_no_port_binding_failure(port) [ 666.426300] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.426300] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 666.427216] env[62619]: nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. [ 666.427216] env[62619]: Removing descriptor: 17 [ 666.427216] env[62619]: ERROR nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. 
[ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Traceback (most recent call last): [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] yield resources [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self.driver.spawn(context, instance, image_meta, [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 666.427216] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] vm_ref = self.build_virtual_machine(instance, [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] for vif in network_info: [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return self._sync_wrapper(fn, *args, **kwargs) [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self.wait() [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self[:] = self._gt.wait() [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return self._exit_event.wait() [ 666.427701] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 666.428189] env[62619]: ERROR 
nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] result = hub.switch() [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return self.greenlet.switch() [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] result = function(*args, **kwargs) [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return func(*args, **kwargs) [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] raise e [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] nwinfo = self.network_api.allocate_for_instance( [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 666.428189] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] created_port_ids = self._update_ports_for_instance( [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] with excutils.save_and_reraise_exception(): [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self.force_reraise() [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] raise self.value [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] updated_port = self._update_port( [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 666.428539] 
env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] _ensure_no_port_binding_failure(port) [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 666.428539] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] raise exception.PortBindingFailed(port_id=port['id']) [ 666.428906] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. [ 666.428906] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] [ 666.428906] env[62619]: INFO nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Terminating instance [ 666.429316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 666.626934] env[62619]: DEBUG nova.network.neutron [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.714037] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 666.775614] env[62619]: DEBUG nova.network.neutron [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 666.848632] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.054886] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6a136ff-cc11-4a54-bec8-545031c61a60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.064750] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f460a248-ec60-4cd8-a00a-95244c9cdc78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.097883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2c7a34c-23dd-46a2-bae5-a35df454b743 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.105376] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc042fd2-1be6-41ef-af86-91fd2fde3a4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 667.118612] env[62619]: DEBUG nova.compute.provider_tree [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 667.281054] env[62619]: DEBUG oslo_concurrency.lockutils [req-80d7a948-8072-406a-ac47-79d09f07630c req-9171c4b0-60cb-495a-8955-5ba4baba6f20 service nova] Releasing lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.281054] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquired lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 667.281191] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 667.354452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 
tempest-VolumesAdminNegativeTest-1916042834-project-member] Releasing lock "refresh_cache-c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 667.355030] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 667.355030] env[62619]: DEBUG nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 667.355146] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 667.372588] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.622070] env[62619]: DEBUG nova.scheduler.client.report [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 667.809145] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 667.874555] env[62619]: DEBUG nova.network.neutron [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 667.930092] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.127369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.953s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 668.128198] env[62619]: ERROR nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Traceback (most recent call last): [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self.driver.spawn(context, instance, image_meta, [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self._vmops.spawn(context, instance, image_meta, injected_files, [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] vm_ref = self.build_virtual_machine(instance, [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] vif_infos = vmwarevif.get_vif_info(self._session, [ 668.128198] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] for vif in network_info: [ 668.128663] env[62619]: 
ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return self._sync_wrapper(fn, *args, **kwargs) [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self.wait() [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self[:] = self._gt.wait() [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return self._exit_event.wait() [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] result = hub.switch() [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 668.128663] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return self.greenlet.switch() [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] result = function(*args, **kwargs) [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] return func(*args, **kwargs) [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] raise e [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] nwinfo = self.network_api.allocate_for_instance( [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] created_port_ids = self._update_ports_for_instance( [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 
6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] with excutils.save_and_reraise_exception(): [ 668.129056] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] self.force_reraise() [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] raise self.value [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] updated_port = self._update_port( [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] _ensure_no_port_binding_failure(port) [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] raise exception.PortBindingFailed(port_id=port['id']) [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] nova.exception.PortBindingFailed: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. [ 668.129430] env[62619]: ERROR nova.compute.manager [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] [ 668.129901] env[62619]: DEBUG nova.compute.utils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. 
{{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 668.131128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 22.841s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 668.132542] env[62619]: INFO nova.compute.claims [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 668.135597] env[62619]: DEBUG nova.compute.manager [req-7223d6ed-078d-4821-b95d-da3b9d061d49 req-da9a0501-2bad-4744-bca4-700e9662980d service nova] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Received event network-vif-deleted-ef14a719-827a-4b61-84cf-d6f4bf04ca6a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 668.136121] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Build of instance 6f32ec6e-af29-4c0e-8f8b-708cbd1af474 was re-scheduled: Binding failed for port 037f72ab-0bda-4c8e-abf0-a84b1d9d5817, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 668.136571] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 668.136789] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 668.136932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquired lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 668.137101] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 668.377254] env[62619]: INFO nova.compute.manager [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: c4ada5ac-4da7-4ae8-a9d9-577a83dd0359] Took 1.02 seconds to deallocate network for instance. 
[ 668.432880] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Releasing lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 668.433367] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 668.433555] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 668.434187] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-92b23c96-1d10-449a-897c-e9a9dfb86147 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.442991] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-875a52f0-638c-4b5d-b358-cba6bb2c69c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 668.463341] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 500c7408-7c73-4111-8d96-9090416e73f1 could not be found. [ 668.463585] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 668.463803] env[62619]: INFO nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 668.464136] env[62619]: DEBUG oslo.service.loopingcall [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 668.464405] env[62619]: DEBUG nova.compute.manager [-] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 668.464526] env[62619]: DEBUG nova.network.neutron [-] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 668.480007] env[62619]: DEBUG nova.network.neutron [-] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.664673] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 668.746199] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 668.985455] env[62619]: DEBUG nova.network.neutron [-] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.249598] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Releasing lock "refresh_cache-6f32ec6e-af29-4c0e-8f8b-708cbd1af474" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 669.249925] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 669.249994] env[62619]: DEBUG nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 669.250305] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 669.267580] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 669.411036] env[62619]: INFO nova.scheduler.client.report [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Deleted allocations for instance c4ada5ac-4da7-4ae8-a9d9-577a83dd0359 [ 669.484868] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38fcd7d3-64b1-4800-a77d-61816a73b9b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.487971] env[62619]: INFO nova.compute.manager [-] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Took 1.02 seconds to deallocate network for instance. [ 669.490026] env[62619]: DEBUG nova.compute.claims [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 669.490240] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 669.494858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dddab32f-f14b-4071-8d7d-e04932df3a7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.526016] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a5b620-6a34-4642-b511-33b730b978a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.533601] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17eb876d-5375-4b0b-a346-cf43b521aa0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 669.547326] env[62619]: DEBUG nova.compute.provider_tree [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 669.770727] env[62619]: DEBUG nova.network.neutron [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 669.925756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae942943-aba1-4ac7-8081-0f74ebfc3a99 tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "c4ada5ac-4da7-4ae8-a9d9-577a83dd0359" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 85.088s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.049160] env[62619]: DEBUG nova.scheduler.client.report [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 670.274082] env[62619]: INFO nova.compute.manager [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: 6f32ec6e-af29-4c0e-8f8b-708cbd1af474] Took 1.02 seconds to deallocate network for instance. [ 670.428383] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 670.556330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.425s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 670.556882] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 670.559871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.344s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.561399] env[62619]: INFO nova.compute.claims [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 670.606616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquiring lock "eedbd5eb-e431-477e-a817-acb8f54fa511" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 670.606616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Lock "eedbd5eb-e431-477e-a817-acb8f54fa511" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 670.948906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.065959] env[62619]: DEBUG nova.compute.utils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 671.069473] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 671.069638] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 671.113430] env[62619]: DEBUG nova.policy [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1e90a23c6444273bc10051f3227804c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '998daea123aa48b2816d1cbe9e662950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 671.310207] env[62619]: INFO nova.scheduler.client.report [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Deleted allocations for instance 6f32ec6e-af29-4c0e-8f8b-708cbd1af474 [ 671.378281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "5ffd77a0-df9a-461c-837e-05b4ff66ea52" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 671.378729] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "5ffd77a0-df9a-461c-837e-05b4ff66ea52" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 671.419134] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Successfully created port: 35df3843-7749-4475-85b7-7690b6ddf4f3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 671.573521] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 671.818888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3730a289-9b9b-4eca-a295-33316f047809 tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "6f32ec6e-af29-4c0e-8f8b-708cbd1af474" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 86.016s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 671.931083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19a1bc46-6a1d-427f-bf19-652ae32e9970 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.939562] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b367e5-9190-4b3b-8f36-4921652eee1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.971140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-477a7f64-5434-4fee-9859-035613415364 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.978475] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85deaad4-96a2-419d-854f-7b8cff73a58c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 671.991645] env[62619]: DEBUG nova.compute.provider_tree [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 672.156845] env[62619]: DEBUG nova.compute.manager [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Received event network-changed-35df3843-7749-4475-85b7-7690b6ddf4f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 672.157702] env[62619]: DEBUG nova.compute.manager [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Refreshing instance network info cache due to event network-changed-35df3843-7749-4475-85b7-7690b6ddf4f3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 672.158016] env[62619]: DEBUG oslo_concurrency.lockutils [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] Acquiring lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.158186] env[62619]: DEBUG oslo_concurrency.lockutils [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] Acquired lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 672.158352] env[62619]: DEBUG nova.network.neutron [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Refreshing network info cache for port 35df3843-7749-4475-85b7-7690b6ddf4f3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 672.291458] env[62619]: ERROR nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. [ 672.291458] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.291458] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.291458] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.291458] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.291458] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.291458] env[62619]: ERROR nova.compute.manager raise self.value [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.291458] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 672.291458] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.291458] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 672.291895] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.291895] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 672.291895] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for 
port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. [ 672.291895] env[62619]: ERROR nova.compute.manager [ 672.291895] env[62619]: Traceback (most recent call last): [ 672.291895] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 672.291895] env[62619]: listener.cb(fileno) [ 672.291895] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.291895] env[62619]: result = function(*args, **kwargs) [ 672.291895] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.291895] env[62619]: return func(*args, **kwargs) [ 672.291895] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.291895] env[62619]: raise e [ 672.291895] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.291895] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 672.291895] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.291895] env[62619]: created_port_ids = self._update_ports_for_instance( [ 672.291895] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.291895] env[62619]: with excutils.save_and_reraise_exception(): [ 672.291895] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.291895] env[62619]: self.force_reraise() [ 672.291895] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.291895] env[62619]: raise self.value [ 672.291895] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.291895] env[62619]: updated_port = self._update_port( [ 672.291895] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.291895] env[62619]: _ensure_no_port_binding_failure(port) [ 672.291895] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 672.291895] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 672.292718] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. [ 672.292718] env[62619]: Removing descriptor: 18 [ 672.322159] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 672.495124] env[62619]: DEBUG nova.scheduler.client.report [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 672.580830] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 672.609842] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 672.610099] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 672.610259] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 672.610437] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 672.610579] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 672.610723] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 
tempest-ImagesTestJSON-13558914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 672.610921] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 672.611091] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 672.611255] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 672.611411] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 672.611571] env[62619]: DEBUG nova.virt.hardware [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 672.613485] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-319411be-0826-404e-8e93-b30b08a755da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.621848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7434a7af-2566-4bc1-acaf-61a326021ae8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 672.635120] env[62619]: ERROR nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. 
[ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Traceback (most recent call last): [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] yield resources [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self.driver.spawn(context, instance, image_meta, [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] vm_ref = self.build_virtual_machine(instance, [ 672.635120] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] vif_infos = vmwarevif.get_vif_info(self._session, [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] for vif in network_info: [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] return self._sync_wrapper(fn, *args, **kwargs) [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self.wait() [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self[:] = self._gt.wait() [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] return self._exit_event.wait() [ 672.635461] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 672.635461] env[62619]: ERROR 
nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] current.throw(*self._exc) [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] result = function(*args, **kwargs) [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] return func(*args, **kwargs) [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] raise e [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] nwinfo = self.network_api.allocate_for_instance( [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] created_port_ids = self._update_ports_for_instance( [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] with excutils.save_and_reraise_exception(): [ 672.635867] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self.force_reraise() [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] raise self.value [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] updated_port = self._update_port( [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] _ensure_no_port_binding_failure(port) [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] raise exception.PortBindingFailed(port_id=port['id']) [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] nova.exception.PortBindingFailed: Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. [ 672.636352] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] [ 672.636352] env[62619]: INFO nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Terminating instance [ 672.637807] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 672.680235] env[62619]: DEBUG nova.network.neutron [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 672.753720] env[62619]: DEBUG nova.network.neutron [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 672.846825] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.000596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.441s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 673.001124] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 673.003697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.357s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.256352] env[62619]: DEBUG oslo_concurrency.lockutils [req-cade9d9d-18c0-424a-a788-884d7af7a5a6 req-26dd216d-7a89-4b57-9d9f-3846582ff118 service nova] Releasing lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 673.256791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 673.256981] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 673.512521] env[62619]: DEBUG nova.compute.utils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 673.515029] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 673.515121] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 673.519277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 673.519532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 673.571880] env[62619]: DEBUG nova.policy [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4b7f289af8d345949a3c3cba8821c545', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90c788067d2640bfa69e51fb796fffa0', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 673.780846] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 673.828224] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Successfully created port: 57ea30d5-424d-4a58-8b25-e3a3ba75440a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 673.845026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc7aa3f6-1ee7-48a6-916a-6c80b5f48bf9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.852285] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-531f9eff-4809-4679-ad87-12e069ae10d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.883718] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 673.885527] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67869fa1-d78d-4867-b463-7c6940998502 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.893355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc4ef9ab-1ccd-4143-a7c1-5ea120f9d729 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 673.907452] env[62619]: DEBUG nova.compute.provider_tree [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 674.022168] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 674.183190] env[62619]: DEBUG nova.compute.manager [req-6021acfe-7df1-42d5-a1d8-e04eca2888dd req-32c9c8da-d9f0-4088-9f02-b27aaded6f3d service nova] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Received event network-vif-deleted-35df3843-7749-4475-85b7-7690b6ddf4f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 674.390169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 674.390724] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 674.390812] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 674.391120] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cf6f9689-b3c3-492c-bfce-4c79b99e2079 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.400343] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cff0a378-5fc5-433b-97b7-b27c05d821dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 674.410963] env[62619]: DEBUG nova.scheduler.client.report [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 674.426343] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance aa89e902-7394-49d5-b6aa-8e9d11548cc5 could not be found. 
[ 674.426544] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 674.426724] env[62619]: INFO nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 674.426962] env[62619]: DEBUG oslo.service.loopingcall [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 674.427204] env[62619]: DEBUG nova.compute.manager [-] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 674.427298] env[62619]: DEBUG nova.network.neutron [-] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 674.442365] env[62619]: DEBUG nova.network.neutron [-] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 674.688397] env[62619]: ERROR nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. 
[ 674.688397] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.688397] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.688397] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.688397] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.688397] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.688397] env[62619]: ERROR nova.compute.manager raise self.value [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.688397] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 674.688397] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.688397] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 674.689232] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.689232] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 674.689232] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. 
[ 674.689232] env[62619]: ERROR nova.compute.manager [ 674.689232] env[62619]: Traceback (most recent call last): [ 674.689232] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 674.689232] env[62619]: listener.cb(fileno) [ 674.689232] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.689232] env[62619]: result = function(*args, **kwargs) [ 674.689232] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.689232] env[62619]: return func(*args, **kwargs) [ 674.689232] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 674.689232] env[62619]: raise e [ 674.689232] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.689232] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 674.689232] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.689232] env[62619]: created_port_ids = self._update_ports_for_instance( [ 674.689232] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.689232] env[62619]: with excutils.save_and_reraise_exception(): [ 674.689232] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.689232] env[62619]: self.force_reraise() [ 674.689232] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.689232] env[62619]: raise self.value [ 674.689232] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.689232] env[62619]: updated_port = self._update_port( [ 674.689232] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.689232] env[62619]: _ensure_no_port_binding_failure(port) [ 674.689232] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 674.689232] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 674.690136] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. [ 674.690136] env[62619]: Removing descriptor: 18 [ 674.917609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.914s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 674.918319] env[62619]: ERROR nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. 
[ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Traceback (most recent call last): [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self.driver.spawn(context, instance, image_meta, [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self._vmops.spawn(context, instance, image_meta, injected_files, [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] vm_ref = self.build_virtual_machine(instance, [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] vif_infos = vmwarevif.get_vif_info(self._session, [ 674.918319] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] for vif in network_info: [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return self._sync_wrapper(fn, *args, **kwargs) [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self.wait() [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self[:] = self._gt.wait() [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return self._exit_event.wait() [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] result = hub.switch() [ 674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
674.918688] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return self.greenlet.switch() [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] result = function(*args, **kwargs) [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] return func(*args, **kwargs) [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] raise e [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] nwinfo = self.network_api.allocate_for_instance( [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] created_port_ids = self._update_ports_for_instance( [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] with excutils.save_and_reraise_exception(): [ 674.919030] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] self.force_reraise() [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] raise self.value [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] updated_port = self._update_port( [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] _ensure_no_port_binding_failure(port) [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] raise exception.PortBindingFailed(port_id=port['id']) [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] nova.exception.PortBindingFailed: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. [ 674.919360] env[62619]: ERROR nova.compute.manager [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] [ 674.919679] env[62619]: DEBUG nova.compute.utils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 674.920322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 22.973s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 674.923287] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Build of instance e145f3d4-fb6e-4c14-abdf-d85596fb0218 was re-scheduled: Binding failed for port 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 674.923724] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 674.923967] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 674.924129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquired lock "refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 674.924288] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 674.944888] env[62619]: DEBUG nova.network.neutron [-] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.028376] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 675.052789] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 675.053068] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 675.053290] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 675.053487] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 675.053629] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 675.053772] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 675.053972] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 675.054128] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 675.054297] env[62619]: DEBUG nova.virt.hardware [None 
req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 675.054448] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 675.054608] env[62619]: DEBUG nova.virt.hardware [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 675.055457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b1e7ab-0e16-4203-aced-9c66e1626c62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.063972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7ae3e5-de09-4f43-97ea-b81969a72bf2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.078462] env[62619]: ERROR nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. 
[ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Traceback (most recent call last): [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] yield resources [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self.driver.spawn(context, instance, image_meta, [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] vm_ref = self.build_virtual_machine(instance, [ 675.078462] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] vif_infos = vmwarevif.get_vif_info(self._session, [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] for vif in network_info: [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] return self._sync_wrapper(fn, *args, **kwargs) [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self.wait() [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self[:] = self._gt.wait() [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] return self._exit_event.wait() [ 675.078762] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 675.078762] env[62619]: ERROR 
nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] current.throw(*self._exc) [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] result = function(*args, **kwargs) [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] return func(*args, **kwargs) [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] raise e [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] nwinfo = self.network_api.allocate_for_instance( [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] created_port_ids = self._update_ports_for_instance( [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] with excutils.save_and_reraise_exception(): [ 675.079129] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self.force_reraise() [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] raise self.value [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] updated_port = self._update_port( [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] _ensure_no_port_binding_failure(port) [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] raise exception.PortBindingFailed(port_id=port['id']) [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] nova.exception.PortBindingFailed: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. [ 675.079447] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] [ 675.079447] env[62619]: INFO nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Terminating instance [ 675.080774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 675.080931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquired lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 675.081107] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 675.446280] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.448991] env[62619]: INFO nova.compute.manager [-] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Took 1.02 seconds to deallocate network for instance. 
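[editor's note] The PortBindingFailed tracebacks above (ports 57ea30d5-424d-4a58-8b25-e3a3ba75440a and 15ce93e9-3a9b-44fa-a8a7-2c0c56c581ad) all bottom out in _ensure_no_port_binding_failure at nova/network/neutron.py:294. The following is a minimal, self-contained sketch of that check for readers of this log; it is illustrative only, and the assumption that Neutron marks an unbindable port with binding:vif_type = 'binding_failed' is the editor's, not something stated in the log itself.

    # Illustrative sketch, not verbatim Nova source.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'   # assumed Neutron marker value

    def _ensure_no_port_binding_failure(port):
        # If the Neutron back end could not bind the port, surface it as
        # PortBindingFailed so the compute manager aborts the claim and
        # re-schedules the build, as the records above show.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])
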
[ 675.452069] env[62619]: DEBUG nova.compute.claims [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 675.452122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 675.536292] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.601619] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 675.681054] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 675.730093] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b91b6622-52fc-4a6a-ab37-0ab029c7d363 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.739537] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-892e6c8c-ce31-428b-ae9e-5fdd83d06b42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.771449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ba0fbad-7f95-4e65-86d3-b4cbae4c3917 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.779449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab5f1339-3ed3-423c-84ee-c8e515c8a0fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 675.794023] env[62619]: DEBUG nova.compute.provider_tree [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 676.039155] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Releasing lock 
"refresh_cache-e145f3d4-fb6e-4c14-abdf-d85596fb0218" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.039393] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 676.039574] env[62619]: DEBUG nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 676.039739] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 676.054694] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.183721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Releasing lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 676.184286] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 676.184468] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 676.184844] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-aac761f9-20fc-4b70-8c6f-653481addb33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.194026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-043d322f-b768-4304-ad56-54ec5520025d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 676.214901] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 06b595af-8ca9-444a-974c-135bf87a2ec5 could not be found. [ 676.215044] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 676.215250] env[62619]: INFO nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Took 0.03 seconds to destroy the instance on the hypervisor. [ 676.215530] env[62619]: DEBUG oslo.service.loopingcall [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 676.215813] env[62619]: DEBUG nova.compute.manager [-] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 676.216053] env[62619]: DEBUG nova.network.neutron [-] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 676.233455] env[62619]: DEBUG nova.compute.manager [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Received event network-changed-57ea30d5-424d-4a58-8b25-e3a3ba75440a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 676.233732] env[62619]: DEBUG nova.compute.manager [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Refreshing instance network info cache due to event network-changed-57ea30d5-424d-4a58-8b25-e3a3ba75440a. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 676.234050] env[62619]: DEBUG oslo_concurrency.lockutils [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] Acquiring lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.234311] env[62619]: DEBUG oslo_concurrency.lockutils [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] Acquired lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.234506] env[62619]: DEBUG nova.network.neutron [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Refreshing network info cache for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 676.236743] env[62619]: DEBUG nova.network.neutron [-] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.297640] env[62619]: DEBUG nova.scheduler.client.report [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 676.557361] env[62619]: DEBUG nova.network.neutron [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.741395] env[62619]: DEBUG nova.network.neutron [-] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 676.755217] env[62619]: DEBUG nova.network.neutron [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 676.802808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.882s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 676.803436] env[62619]: ERROR nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Traceback (most recent call last): [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self.driver.spawn(context, instance, image_meta, [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] vm_ref = self.build_virtual_machine(instance, [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] vif_infos = vmwarevif.get_vif_info(self._session, [ 676.803436] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] for vif in network_info: [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return self._sync_wrapper(fn, *args, **kwargs) [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self.wait() [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 676.803773] env[62619]: ERROR nova.compute.manager 
[instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self[:] = self._gt.wait() [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return self._exit_event.wait() [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] result = hub.switch() [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 676.803773] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return self.greenlet.switch() [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] result = function(*args, **kwargs) [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] return func(*args, **kwargs) [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] raise e [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] nwinfo = self.network_api.allocate_for_instance( [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] created_port_ids = self._update_ports_for_instance( [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] with excutils.save_and_reraise_exception(): [ 676.804114] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] self.force_reraise() [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] raise self.value [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] updated_port = self._update_port( [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] _ensure_no_port_binding_failure(port) [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] raise exception.PortBindingFailed(port_id=port['id']) [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] nova.exception.PortBindingFailed: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. [ 676.804513] env[62619]: ERROR nova.compute.manager [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] [ 676.804828] env[62619]: DEBUG nova.compute.utils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 676.805259] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 18.936s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 676.806762] env[62619]: INFO nova.compute.claims [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 676.809594] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Build of instance 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8 was re-scheduled: Binding failed for port 8edd1ac1-581d-4f5a-9073-1afdc0ea3484, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 676.809938] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 676.810208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquiring lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 676.810304] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Acquired lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 676.810453] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 676.824455] env[62619]: DEBUG nova.network.neutron [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.060927] env[62619]: INFO nova.compute.manager [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: e145f3d4-fb6e-4c14-abdf-d85596fb0218] Took 1.02 seconds to deallocate network for instance. [ 677.243654] env[62619]: INFO nova.compute.manager [-] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Took 1.03 seconds to deallocate network for instance. 
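[editor's note] The recurring 'Acquiring lock "compute_resources"' / 'acquired ... waited N.NNNs' / '"released" ... held N.NNNs' DEBUG records in this section are emitted by the inner wrapper in oslo_concurrency/lockutils.py (lines 402/407/421 in the paths above). Below is a hypothetical analogue, assuming the standard oslo.concurrency API, of how the resource tracker serialises claim and abort work under one named lock; the class body is a placeholder, not Nova source.

    from oslo_concurrency import lockutils

    COMPUTE_RESOURCE_SEMAPHORE = "compute_resources"  # lock name as seen in the log

    class ResourceTracker:
        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def abort_instance_claim(self, context, instance, nodename):
            # Every claim/abort runs under the same named lock, which is why a
            # competing request above reports 'waited 22.973s' before acquiring it.
            pass

        @lockutils.synchronized(COMPUTE_RESOURCE_SEMAPHORE)
        def instance_claim(self, context, instance, nodename, allocations):
            pass
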
[ 677.245987] env[62619]: DEBUG nova.compute.claims [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 677.246194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 677.329764] env[62619]: DEBUG oslo_concurrency.lockutils [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] Releasing lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.330027] env[62619]: DEBUG nova.compute.manager [req-5be88679-2ffd-4c84-84eb-cdd2009bf259 req-ead37743-59f3-4750-ab48-a4b47099d1f0 service nova] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Received event network-vif-deleted-57ea30d5-424d-4a58-8b25-e3a3ba75440a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 677.330996] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 677.410354] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 677.912704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Releasing lock "refresh_cache-4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 677.912992] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 677.913119] env[62619]: DEBUG nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 677.913288] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 677.929127] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 678.086480] env[62619]: INFO nova.scheduler.client.report [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Deleted allocations for instance e145f3d4-fb6e-4c14-abdf-d85596fb0218 [ 678.094288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d1ae1ff-bb67-4f64-a456-e839387cf8e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.101818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d858040-b480-454f-8264-c4741f201db1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.133647] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3a89d50-a8eb-4286-9d0b-6ba11b8d5679 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.141428] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-056d7714-cab6-4700-a5f1-1dbbfd75638f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 678.155877] env[62619]: DEBUG nova.compute.provider_tree [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 678.431781] env[62619]: DEBUG nova.network.neutron [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 678.598771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-85edbb8c-adae-42d0-bde6-e0b4f41071e8 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "e145f3d4-fb6e-4c14-abdf-d85596fb0218" "released" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.302s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 678.659439] env[62619]: DEBUG nova.scheduler.client.report [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 678.934615] env[62619]: INFO nova.compute.manager [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] [instance: 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8] Took 1.02 seconds to deallocate network for instance. [ 679.101224] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 679.164895] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.359s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 679.165458] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 679.168253] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 17.882s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 679.622386] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 679.677062] env[62619]: DEBUG nova.compute.utils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 679.678978] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 679.679235] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 679.722448] env[62619]: DEBUG nova.policy [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '006eb6e2b00e4121a623567f9b515be8', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'eadc48ab6a004882adafacc9ec54bd73', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 679.967273] env[62619]: INFO nova.scheduler.client.report [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Deleted allocations for instance 4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8 [ 680.022584] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d463c3f6-1c5c-4912-bbf0-75e432eb1744 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.032635] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f9eb8aa-cf09-470d-85a3-52df0bd86312 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.062356] 
env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Successfully created port: f8db1889-6665-4e35-9af5-868daca48f8c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 680.064732] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89bad9c5-4798-46d4-bfea-ca2bc6dc895c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.072715] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9c8bbb5-2332-47e6-8fd3-9b8c95d16e16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 680.086537] env[62619]: DEBUG nova.compute.provider_tree [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 680.181982] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 680.480093] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a3abd5d4-8bb9-4577-b76c-3437193bf217 tempest-ServersAdminTestJSON-22452663 tempest-ServersAdminTestJSON-22452663-project-member] Lock "4fbcc9f7-c8dd-497d-a001-17c85fb3d6c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 92.744s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 680.589731] env[62619]: DEBUG nova.scheduler.client.report [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 680.748142] env[62619]: DEBUG nova.compute.manager [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Received event network-changed-f8db1889-6665-4e35-9af5-868daca48f8c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 680.748358] env[62619]: DEBUG nova.compute.manager [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Refreshing instance network info cache due to event 
network-changed-f8db1889-6665-4e35-9af5-868daca48f8c. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 680.748575] env[62619]: DEBUG oslo_concurrency.lockutils [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] Acquiring lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 680.748718] env[62619]: DEBUG oslo_concurrency.lockutils [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] Acquired lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 680.749269] env[62619]: DEBUG nova.network.neutron [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Refreshing network info cache for port f8db1889-6665-4e35-9af5-868daca48f8c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 680.915582] env[62619]: ERROR nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. [ 680.915582] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.915582] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 680.915582] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 680.915582] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.915582] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.915582] env[62619]: ERROR nova.compute.manager raise self.value [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 680.915582] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 680.915582] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.915582] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 680.916098] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.916098] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 680.916098] env[62619]: ERROR 
nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. [ 680.916098] env[62619]: ERROR nova.compute.manager [ 680.916098] env[62619]: Traceback (most recent call last): [ 680.916098] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 680.916098] env[62619]: listener.cb(fileno) [ 680.916098] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 680.916098] env[62619]: result = function(*args, **kwargs) [ 680.916098] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 680.916098] env[62619]: return func(*args, **kwargs) [ 680.916098] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 680.916098] env[62619]: raise e [ 680.916098] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 680.916098] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 680.916098] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 680.916098] env[62619]: created_port_ids = self._update_ports_for_instance( [ 680.916098] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 680.916098] env[62619]: with excutils.save_and_reraise_exception(): [ 680.916098] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 680.916098] env[62619]: self.force_reraise() [ 680.916098] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 680.916098] env[62619]: raise self.value [ 680.916098] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 680.916098] env[62619]: updated_port = self._update_port( [ 680.916098] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 680.916098] env[62619]: _ensure_no_port_binding_failure(port) [ 680.916098] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 680.916098] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 680.916917] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. [ 680.916917] env[62619]: Removing descriptor: 18 [ 680.982662] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 681.096624] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.928s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 681.097289] env[62619]: ERROR nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Traceback (most recent call last): [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self.driver.spawn(context, instance, image_meta, [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] vm_ref = self.build_virtual_machine(instance, [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.097289] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] for vif in network_info: [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return self._sync_wrapper(fn, *args, **kwargs) [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self.wait() [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 681.097633] env[62619]: ERROR 
nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self[:] = self._gt.wait() [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return self._exit_event.wait() [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] result = hub.switch() [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 681.097633] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return self.greenlet.switch() [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] result = function(*args, **kwargs) [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] return func(*args, **kwargs) [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] raise e [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] nwinfo = self.network_api.allocate_for_instance( [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] created_port_ids = self._update_ports_for_instance( [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] with excutils.save_and_reraise_exception(): [ 681.098113] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] self.force_reraise() [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] raise self.value [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] updated_port = self._update_port( [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] _ensure_no_port_binding_failure(port) [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] raise exception.PortBindingFailed(port_id=port['id']) [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] nova.exception.PortBindingFailed: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. [ 681.098479] env[62619]: ERROR nova.compute.manager [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] [ 681.098780] env[62619]: DEBUG nova.compute.utils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 681.099616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 14.837s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 681.102211] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Build of instance 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf was re-scheduled: Binding failed for port 4e2f7ab7-45bc-44ce-b4c3-47ebc5b1532d, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 681.102211] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 681.102211] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.102211] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.102450] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 681.191345] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 681.215836] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 681.216133] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 681.216296] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 681.216473] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 681.216611] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 681.216750] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 681.216949] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 681.217122] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 681.217281] env[62619]: DEBUG 
nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 681.217437] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 681.217668] env[62619]: DEBUG nova.virt.hardware [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 681.218552] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c95c972-4f5b-4320-aef5-b678f15fc15f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.226648] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e7f690f-044e-4c98-97a8-b6ee26575d99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 681.240049] env[62619]: ERROR nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. 
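The "Instance failed to spawn: nova.exception.PortBindingFailed" entry above, and the traceback that follows, originate at the point where Nova inspects the port it just updated in Neutron. The sketch below is a minimal, self-contained illustration of that check, modelled on the _ensure_no_port_binding_failure() frames visible in the traceback; the exception class and the port dict are re-declared here only so the snippet runs on its own and are not taken verbatim from Nova.

    # Sketch: how a 'binding_failed' vif_type from Neutron becomes PortBindingFailed.
    # The real exception lives in nova.exception; this stand-in only mirrors its message.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check neutron "
                             "logs for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'  # value Neutron reports when no host could bind the port

    def ensure_no_port_binding_failure(port):
        # Raise as soon as Neutron reports that binding the port failed.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])

    try:
        ensure_no_port_binding_failure({'id': 'f8db1889-6665-4e35-9af5-868daca48f8c',
                                        'binding:vif_type': 'binding_failed'})
    except PortBindingFailed as exc:
        print(exc)  # prints the same "Binding failed for port ..." text seen in the log
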
[ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Traceback (most recent call last): [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] yield resources [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self.driver.spawn(context, instance, image_meta, [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] vm_ref = self.build_virtual_machine(instance, [ 681.240049] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] vif_infos = vmwarevif.get_vif_info(self._session, [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] for vif in network_info: [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] return self._sync_wrapper(fn, *args, **kwargs) [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self.wait() [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self[:] = self._gt.wait() [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] return self._exit_event.wait() [ 681.240435] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 681.240435] env[62619]: ERROR 
nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] current.throw(*self._exc) [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] result = function(*args, **kwargs) [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] return func(*args, **kwargs) [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] raise e [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] nwinfo = self.network_api.allocate_for_instance( [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] created_port_ids = self._update_ports_for_instance( [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] with excutils.save_and_reraise_exception(): [ 681.240847] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self.force_reraise() [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] raise self.value [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] updated_port = self._update_port( [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] _ensure_no_port_binding_failure(port) [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] raise exception.PortBindingFailed(port_id=port['id']) [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] nova.exception.PortBindingFailed: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. [ 681.241171] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] [ 681.241171] env[62619]: INFO nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Terminating instance [ 681.242443] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquiring lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 681.268889] env[62619]: DEBUG nova.network.neutron [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.352664] env[62619]: DEBUG nova.network.neutron [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.508212] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 681.624791] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 681.704951] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 681.855512] env[62619]: DEBUG oslo_concurrency.lockutils [req-74974e47-1d1d-4087-8d91-1ba32b824263 req-58f29989-e3a5-4353-a427-562dcf201154 service nova] Releasing lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 681.855943] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquired lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 681.856144] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 682.207636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.208063] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Virt driver does not provide unplug_vifs method, so it is not possible to determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 682.208131] env[62619]: DEBUG nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.208280] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 682.224509] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.375114] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 682.444024] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.646930] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 682.646930] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 500c7408-7c73-4111-8d96-9090416e73f1 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.646930] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance aa89e902-7394-49d5-b6aa-8e9d11548cc5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.646930] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 06b595af-8ca9-444a-974c-135bf87a2ec5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.647166] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 709ed215-d501-409a-ab80-6c4b844d24e6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 682.728227] env[62619]: DEBUG nova.network.neutron [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 682.777868] env[62619]: DEBUG nova.compute.manager [req-67a66d09-c211-4b2f-9df6-98e450bbac58 req-377394c0-8fea-454a-bb1d-89e9027997ad service nova] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Received event network-vif-deleted-f8db1889-6665-4e35-9af5-868daca48f8c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 682.947135] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Releasing lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 682.947678] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 682.947878] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 682.948243] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e758375a-5863-4472-b37b-3224d218891d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.959017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbec16b-6bb8-4f0e-8bdf-1d7270f57db2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 682.978744] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 709ed215-d501-409a-ab80-6c4b844d24e6 could not be found. 
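The InstanceNotFound warning above shows the destroy path tolerating a VM that was never created on vCenter: spawn failed before build_virtual_machine, so the UUID lookup (the SearchIndex.FindAllByUuid call a few entries earlier) finds nothing, the driver logs the warning, and the cleanup entries that follow still deallocate the network and abort the claim. Below is a small self-contained sketch of that idempotent-destroy pattern; the helper names and the dict-backed "backend" are illustrative assumptions, not the driver's real API.

    import logging

    logging.basicConfig(level=logging.DEBUG)
    LOG = logging.getLogger("vmops-sketch")

    class InstanceNotFound(Exception):
        """Stand-in for nova.exception.InstanceNotFound."""

    def lookup_vm_by_uuid(backend, instance_uuid):
        # The real driver resolves the VM with a vCenter SearchIndex.FindAllByUuid
        # call; a plain dict keeps this sketch runnable on its own.
        try:
            return backend[instance_uuid]
        except KeyError:
            raise InstanceNotFound(instance_uuid)

    def destroy_instance(backend, instance_uuid):
        try:
            vm_ref = lookup_vm_by_uuid(backend, instance_uuid)
            backend.pop(instance_uuid)  # stands in for power-off, unregister and delete
            LOG.debug("Destroyed VM %s", vm_ref)
        except InstanceNotFound:
            # No VM exists on the hypervisor, so warn and let network
            # deallocation and claim abort proceed, as in the log above.
            LOG.warning("Instance does not exist on backend: %s", instance_uuid)

    destroy_instance({}, "709ed215-d501-409a-ab80-6c4b844d24e6")
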
[ 682.978958] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 682.979190] env[62619]: INFO nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Took 0.03 seconds to destroy the instance on the hypervisor. [ 682.979437] env[62619]: DEBUG oslo.service.loopingcall [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 682.979653] env[62619]: DEBUG nova.compute.manager [-] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 682.979746] env[62619]: DEBUG nova.network.neutron [-] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 683.000092] env[62619]: DEBUG nova.network.neutron [-] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 683.148552] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 7d8ae6c2-1453-4d61-a2b5-311a557087de has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 683.231525] env[62619]: INFO nova.compute.manager [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf] Took 1.02 seconds to deallocate network for instance. [ 683.502678] env[62619]: DEBUG nova.network.neutron [-] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 683.651347] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance de395cf9-2888-4a0d-a1b8-5ce4c36d6182 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.006356] env[62619]: INFO nova.compute.manager [-] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Took 1.03 seconds to deallocate network for instance. 
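The "Aborting claim" entries that follow sit behind the same "compute_resources" lock as every instance_claim and periodic _update_available_resource, which is why acquisitions earlier in this run report waits of 17.882s and 14.837s once several builds fail and re-schedule together. The sketch below illustrates that serialisation pattern with oslo.concurrency; only the lock usage mirrors the real code, and the tracker state is deliberately reduced to a single memory counter for illustration.

    from oslo_concurrency import lockutils

    class ResourceTracker:
        """Toy tracker; the 'compute_resources' locking pattern is the point."""

        def __init__(self, memory_mb_total):
            self.free_memory_mb = memory_mb_total

        @lockutils.synchronized('compute_resources')
        def instance_claim(self, instance_uuid, memory_mb):
            # Claim resources for a build; holding the lock keeps the view consistent.
            if memory_mb > self.free_memory_mb:
                raise RuntimeError("insufficient memory for %s" % instance_uuid)
            self.free_memory_mb -= memory_mb
            return {'memory_mb': memory_mb}

        @lockutils.synchronized('compute_resources')
        def abort_instance_claim(self, instance_uuid, claim):
            # Mirrors the "Aborting claim" / lock-acquisition pairs in the log:
            # the claimed resources are handed back under the same lock.
            self.free_memory_mb += claim['memory_mb']

    rt = ResourceTracker(memory_mb_total=196590)  # MEMORY_MB total reported by placement above
    claim = rt.instance_claim('709ed215-d501-409a-ab80-6c4b844d24e6', 192)
    rt.abort_instance_claim('709ed215-d501-409a-ab80-6c4b844d24e6', claim)
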
[ 684.008982] env[62619]: DEBUG nova.compute.claims [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 684.009144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 684.155704] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 37bb9933-80fa-4a54-82fe-f864a411425f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.268023] env[62619]: INFO nova.scheduler.client.report [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleted allocations for instance 7d98f551-5dd5-40b4-bfb7-0d54f69e29bf [ 684.657565] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance d02f8bcb-c754-4308-9c90-260624010cb0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 684.776809] env[62619]: DEBUG oslo_concurrency.lockutils [None req-de841ca6-7c25-44bc-aee5-4ce6fbde0bfd tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "7d98f551-5dd5-40b4-bfb7-0d54f69e29bf" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.210s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 685.161392] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 25df5673-9633-40de-8e72-a8620f19a6f9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.278850] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 685.664462] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance d92cd356-0e29-429d-9216-b376e91e0fe8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 685.812893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 686.169792] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance ffae1b5d-83fc-4007-be0f-c6f1e285f824 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 686.673965] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 9735d6d1-eb10-46b4-a273-10b1351033f0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.177545] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 687.476623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "28a37e28-6bca-4647-9cba-345da2f973a2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 687.476864] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "28a37e28-6bca-4647-9cba-345da2f973a2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 687.683120] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 73145811-c355-462e-9a8e-ffccf2efe683 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.139302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquiring lock "5d7a9c55-b148-4115-8390-66b2501f859a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 688.139529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Lock "5d7a9c55-b148-4115-8390-66b2501f859a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 688.185612] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance c307cc2f-d0c9-49ab-aafa-768a34199f0c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 688.688572] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance c9c375f1-dd7f-43fb-acf6-45e766a8333d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.192423] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 689.695280] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 695dfaa6-8e34-4426-b025-6ce7e4e3174c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.198735] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 690.702296] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance eda91c9e-886e-468e-b9eb-0435c1e94cd3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.205595] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance eedbd5eb-e431-477e-a817-acb8f54fa511 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.708761] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 5ffd77a0-df9a-461c-837e-05b4ff66ea52 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.212843] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance a2ed3e4d-40c2-46b0-9892-0e9cce7b330b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 692.213137] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 692.213323] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 692.479604] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65686df2-8d2e-426c-89df-e11ac55755b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.487270] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f25df74-6c72-40f0-8c06-ac4a6147f98d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.515550] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b36da7e-270f-41e3-a31e-844544fc85b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.522666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85db80b3-8530-46fe-ac42-f2dde578c183 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.535475] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 693.038469] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 693.545780] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 693.545941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 12.446s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 693.546123] env[62619]: DEBUG oslo_concurrency.lockutils [None 
req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.167s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 693.547620] env[62619]: INFO nova.compute.claims [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 694.796952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d116f0de-6589-4ed4-a937-420b1c0be6fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.804395] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-924d7d8b-0b00-4f05-a13f-9cf1d6da1c90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.832707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3632ed33-3bb4-4917-b5f9-5e63af8719dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.840995] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60674fe9-acf2-408d-a198-93f64953509d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 694.859307] env[62619]: DEBUG nova.compute.provider_tree [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 695.362787] env[62619]: DEBUG nova.scheduler.client.report [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 695.869024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.323s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 695.869616] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc 
tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 695.872351] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 26.382s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 696.379063] env[62619]: DEBUG nova.compute.utils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 696.383553] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 696.383664] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 696.423939] env[62619]: DEBUG nova.policy [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a3ff6674d60e4fa398e2dec6ce91f8f1', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '08e708f15c9c424e9b5f620bd91afb0e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 696.685505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11c30b6f-365c-4c98-95a1-8f1087aee37d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.689549] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Successfully created port: d3a780b5-e376-4201-8f67-6b386ac68f31 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 696.695889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c166034-cac4-46f7-bd4d-87ec3a40a363 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
696.724873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28011b21-f756-4c70-8241-b85e79dea248 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.731698] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14eb4d36-1a5b-4e08-9ca6-6adb31ae79b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 696.744126] env[62619]: DEBUG nova.compute.provider_tree [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 696.884203] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 697.247532] env[62619]: DEBUG nova.scheduler.client.report [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 697.316394] env[62619]: DEBUG nova.compute.manager [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Received event network-changed-d3a780b5-e376-4201-8f67-6b386ac68f31 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 697.316583] env[62619]: DEBUG nova.compute.manager [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Refreshing instance network info cache due to event network-changed-d3a780b5-e376-4201-8f67-6b386ac68f31. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 697.316789] env[62619]: DEBUG oslo_concurrency.lockutils [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] Acquiring lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.316925] env[62619]: DEBUG oslo_concurrency.lockutils [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] Acquired lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.317094] env[62619]: DEBUG nova.network.neutron [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Refreshing network info cache for port d3a780b5-e376-4201-8f67-6b386ac68f31 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 697.522143] env[62619]: ERROR nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. [ 697.522143] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.522143] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.522143] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.522143] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.522143] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.522143] env[62619]: ERROR nova.compute.manager raise self.value [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.522143] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 697.522143] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.522143] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 697.522661] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.522661] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 697.522661] env[62619]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. [ 697.522661] env[62619]: ERROR nova.compute.manager [ 697.522661] env[62619]: Traceback (most recent call last): [ 697.522661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 697.522661] env[62619]: listener.cb(fileno) [ 697.522661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.522661] env[62619]: result = function(*args, **kwargs) [ 697.522661] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.522661] env[62619]: return func(*args, **kwargs) [ 697.522661] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.522661] env[62619]: raise e [ 697.522661] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.522661] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 697.522661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.522661] env[62619]: created_port_ids = self._update_ports_for_instance( [ 697.522661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.522661] env[62619]: with excutils.save_and_reraise_exception(): [ 697.522661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.522661] env[62619]: self.force_reraise() [ 697.522661] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.522661] env[62619]: raise self.value [ 697.522661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.522661] env[62619]: updated_port = self._update_port( [ 697.522661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.522661] env[62619]: _ensure_no_port_binding_failure(port) [ 697.522661] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 697.522661] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 697.523507] env[62619]: nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. [ 697.523507] env[62619]: Removing descriptor: 18 [ 697.755359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.883s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 697.755988] env[62619]: ERROR nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. 
[ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Traceback (most recent call last): [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self.driver.spawn(context, instance, image_meta, [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] vm_ref = self.build_virtual_machine(instance, [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.755988] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] for vif in network_info: [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return self._sync_wrapper(fn, *args, **kwargs) [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self.wait() [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self[:] = self._gt.wait() [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return self._exit_event.wait() [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] result = hub.switch() [ 697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
697.756339] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return self.greenlet.switch() [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] result = function(*args, **kwargs) [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] return func(*args, **kwargs) [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] raise e [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] nwinfo = self.network_api.allocate_for_instance( [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] created_port_ids = self._update_ports_for_instance( [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] with excutils.save_and_reraise_exception(): [ 697.756713] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] self.force_reraise() [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] raise self.value [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] updated_port = self._update_port( [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] _ensure_no_port_binding_failure(port) [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] raise exception.PortBindingFailed(port_id=port['id']) [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] nova.exception.PortBindingFailed: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. [ 697.757096] env[62619]: ERROR nova.compute.manager [instance: 500c7408-7c73-4111-8d96-9090416e73f1] [ 697.757417] env[62619]: DEBUG nova.compute.utils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 697.757942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.809s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 697.759572] env[62619]: INFO nova.compute.claims [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 697.762314] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Build of instance 500c7408-7c73-4111-8d96-9090416e73f1 was re-scheduled: Binding failed for port ef14a719-827a-4b61-84cf-d6f4bf04ca6a, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 697.762741] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 697.762963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 697.763121] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquired lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 697.763283] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 697.835123] env[62619]: DEBUG nova.network.neutron [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 697.896761] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 697.907643] env[62619]: DEBUG nova.network.neutron [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 697.923406] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 697.924121] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 697.924121] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 697.924121] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 697.924121] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 697.924337] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 697.924414] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 697.924568] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 697.924723] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 697.924912] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 697.925111] env[62619]: DEBUG nova.virt.hardware [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 697.926489] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac71d29c-f820-40cd-848c-f4d6a59d768f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.935268] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31d08b7-4c79-4eb0-afe3-409c5abac3bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 697.950268] env[62619]: ERROR nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. 
[ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Traceback (most recent call last): [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] yield resources [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self.driver.spawn(context, instance, image_meta, [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] vm_ref = self.build_virtual_machine(instance, [ 697.950268] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] vif_infos = vmwarevif.get_vif_info(self._session, [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] for vif in network_info: [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] return self._sync_wrapper(fn, *args, **kwargs) [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self.wait() [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self[:] = self._gt.wait() [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] return self._exit_event.wait() [ 697.950675] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 697.950675] env[62619]: ERROR 
nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] current.throw(*self._exc) [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] result = function(*args, **kwargs) [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] return func(*args, **kwargs) [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] raise e [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] nwinfo = self.network_api.allocate_for_instance( [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] created_port_ids = self._update_ports_for_instance( [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] with excutils.save_and_reraise_exception(): [ 697.951065] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self.force_reraise() [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] raise self.value [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] updated_port = self._update_port( [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] _ensure_no_port_binding_failure(port) [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] raise exception.PortBindingFailed(port_id=port['id']) [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. [ 697.951445] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] [ 697.951445] env[62619]: INFO nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Terminating instance [ 697.952978] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquiring lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 698.281971] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.369065] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 698.410183] env[62619]: DEBUG oslo_concurrency.lockutils [req-b0b6fdf1-8cfe-44d8-b898-a3847163af89 req-a1575ad5-9486-454d-a6a0-9abcd729f35c service nova] Releasing lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.410834] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquired lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 698.411036] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 698.872051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Releasing lock "refresh_cache-500c7408-7c73-4111-8d96-9090416e73f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 698.872405] env[62619]: DEBUG nova.compute.manager [None 
req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 698.872676] env[62619]: DEBUG nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 698.872968] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 698.888739] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 698.928657] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.002114] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.081909] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990d5907-ebef-44a5-a77d-0a85ad18098d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.094404] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cea60b47-0eba-4790-863f-38d384914a8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.121225] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0356a91e-6f71-4706-bff2-42f8fcf52b7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.127908] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b558f1-0703-4d06-9eab-376655859785 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.140405] env[62619]: DEBUG nova.compute.provider_tree [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 
tempest-ServerActionsTestJSON-893871560-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 699.355302] env[62619]: DEBUG nova.compute.manager [req-0a3e6785-30f6-4b3b-a3f4-dd5519d1af45 req-481a184e-02c7-4a2d-aa52-6d796ab3a978 service nova] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Received event network-vif-deleted-d3a780b5-e376-4201-8f67-6b386ac68f31 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 699.393894] env[62619]: DEBUG nova.network.neutron [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 699.504302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Releasing lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 699.504734] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 699.504923] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 699.505229] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a823683d-0805-4a38-9ec0-e013337bd5ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.514142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f61cd494-9c97-4698-ab51-2b8af62c0a36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 699.535228] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7d8ae6c2-1453-4d61-a2b5-311a557087de could not be found. 
[ 699.535454] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 699.535626] env[62619]: INFO nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Took 0.03 seconds to destroy the instance on the hypervisor. [ 699.535863] env[62619]: DEBUG oslo.service.loopingcall [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 699.536090] env[62619]: DEBUG nova.compute.manager [-] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 699.536190] env[62619]: DEBUG nova.network.neutron [-] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 699.553122] env[62619]: DEBUG nova.network.neutron [-] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 699.643056] env[62619]: DEBUG nova.scheduler.client.report [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 699.896394] env[62619]: INFO nova.compute.manager [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 500c7408-7c73-4111-8d96-9090416e73f1] Took 1.02 seconds to deallocate network for instance. 
[ 700.055205] env[62619]: DEBUG nova.network.neutron [-] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 700.147670] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 700.148212] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 700.150874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.304s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 700.152235] env[62619]: INFO nova.compute.claims [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 700.557633] env[62619]: INFO nova.compute.manager [-] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Took 1.02 seconds to deallocate network for instance. [ 700.560085] env[62619]: DEBUG nova.compute.claims [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 700.560272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 700.658462] env[62619]: DEBUG nova.compute.utils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 700.659866] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 700.660096] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 700.700471] env[62619]: DEBUG nova.policy [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '10e784fe682e4b989f2571b00fbf7e87', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'fbafd9d7551947af9308e2ecdc0b071c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 700.923876] env[62619]: INFO nova.scheduler.client.report [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Deleted allocations for instance 500c7408-7c73-4111-8d96-9090416e73f1 [ 700.982090] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Successfully created port: 5b7e5bef-60fd-47c6-b905-8bd2d37fe055 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 701.163667] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 701.433970] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a83b1d68-a9cf-4373-af12-b5d5dcd6d2e7 tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "500c7408-7c73-4111-8d96-9090416e73f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 107.933s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 701.545702] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-178ca65c-fae0-4be4-bee7-114083644267 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.553917] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d4d0b22-df85-4f3d-a3ab-15ec3153726d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.586559] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88b2f721-d242-4e6f-ad5a-93f8e72d74cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.593704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f3c62d3-f682-49bc-98f8-28473cf5621c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 701.609045] env[62619]: DEBUG nova.compute.provider_tree [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 701.672837] env[62619]: DEBUG nova.compute.manager [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Received event network-changed-5b7e5bef-60fd-47c6-b905-8bd2d37fe055 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 701.673119] env[62619]: DEBUG nova.compute.manager [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Refreshing instance network info cache due to event network-changed-5b7e5bef-60fd-47c6-b905-8bd2d37fe055. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 701.673377] env[62619]: DEBUG oslo_concurrency.lockutils [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] Acquiring lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 701.673553] env[62619]: DEBUG oslo_concurrency.lockutils [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] Acquired lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 701.673746] env[62619]: DEBUG nova.network.neutron [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Refreshing network info cache for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 701.852685] env[62619]: ERROR nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. [ 701.852685] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.852685] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 701.852685] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 701.852685] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.852685] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.852685] env[62619]: ERROR nova.compute.manager raise self.value [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 701.852685] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 701.852685] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.852685] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 701.853408] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.853408] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 701.853408] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. [ 701.853408] env[62619]: ERROR nova.compute.manager [ 701.853408] env[62619]: Traceback (most recent call last): [ 701.853408] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 701.853408] env[62619]: listener.cb(fileno) [ 701.853408] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 701.853408] env[62619]: result = function(*args, **kwargs) [ 701.853408] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 701.853408] env[62619]: return func(*args, **kwargs) [ 701.853408] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 701.853408] env[62619]: raise e [ 701.853408] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 701.853408] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 701.853408] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 701.853408] env[62619]: created_port_ids = self._update_ports_for_instance( [ 701.853408] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 701.853408] env[62619]: with excutils.save_and_reraise_exception(): [ 701.853408] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 701.853408] env[62619]: self.force_reraise() [ 701.853408] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 701.853408] env[62619]: raise self.value [ 701.853408] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 701.853408] env[62619]: updated_port = self._update_port( [ 701.853408] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 701.853408] env[62619]: _ensure_no_port_binding_failure(port) [ 701.853408] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 701.853408] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 701.855495] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. [ 701.855495] env[62619]: Removing descriptor: 18 [ 701.938364] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 702.112394] env[62619]: DEBUG nova.scheduler.client.report [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 702.177344] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 702.200121] env[62619]: DEBUG nova.network.neutron [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 702.223051] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 702.223051] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 702.223051] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 702.223236] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 702.223236] env[62619]: DEBUG 
nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 702.223236] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 702.223236] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 702.223236] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 702.224090] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 702.224357] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 702.224543] env[62619]: DEBUG nova.virt.hardware [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 702.225432] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b040bc0-6513-4ab1-9a04-3ec685c55f74 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.235885] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e995cbae-92ef-4a32-8d1e-94ecb049fa8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 702.247421] env[62619]: ERROR nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. 
[ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Traceback (most recent call last): [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] yield resources [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self.driver.spawn(context, instance, image_meta, [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] vm_ref = self.build_virtual_machine(instance, [ 702.247421] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] vif_infos = vmwarevif.get_vif_info(self._session, [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] for vif in network_info: [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] return self._sync_wrapper(fn, *args, **kwargs) [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self.wait() [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self[:] = self._gt.wait() [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] return self._exit_event.wait() [ 702.247764] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 702.247764] env[62619]: ERROR 
nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] current.throw(*self._exc) [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] result = function(*args, **kwargs) [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] return func(*args, **kwargs) [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] raise e [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] nwinfo = self.network_api.allocate_for_instance( [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] created_port_ids = self._update_ports_for_instance( [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] with excutils.save_and_reraise_exception(): [ 702.248430] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self.force_reraise() [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] raise self.value [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] updated_port = self._update_port( [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] _ensure_no_port_binding_failure(port) [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] raise exception.PortBindingFailed(port_id=port['id']) [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] nova.exception.PortBindingFailed: Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. [ 702.248801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] [ 702.248801] env[62619]: INFO nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Terminating instance [ 702.249799] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquiring lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 702.298977] env[62619]: DEBUG nova.network.neutron [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 702.462051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 702.616992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.466s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 702.617653] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 702.620684] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.169s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 702.804034] env[62619]: DEBUG oslo_concurrency.lockutils [req-58196933-8e6f-46f4-98ea-7fb93f55bf2b req-f8e40bbf-da07-43cb-8046-c45cf8b653a2 service nova] Releasing lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 702.804209] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquired lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 702.804796] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 703.124828] env[62619]: DEBUG nova.compute.utils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 703.130125] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 703.130307] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 703.180629] env[62619]: DEBUG nova.policy [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1951cb4058114ac695f1ebc8980135bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62daf278f23642d3b3210ed2bfa85311', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 703.328632] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 703.446044] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c878ff6-dccf-4fde-83a9-55fd9dc68c2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.456414] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a15b59d-9836-4f42-89a8-a3ce2b656d58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.485570] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Successfully created port: f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 703.487928] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 703.489518] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dd2a863-d019-46e7-94ab-d35186d3d743 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.498593] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d581b4c1-9816-4af3-9871-74d4bc7c8800 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 703.514025] env[62619]: DEBUG nova.compute.provider_tree [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d 
tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 703.631073] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 703.697148] env[62619]: DEBUG nova.compute.manager [req-4c4e7480-6f37-4e82-98ef-7b5bf040e683 req-5556f028-c798-4391-b673-2dd15c94b7aa service nova] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Received event network-vif-deleted-5b7e5bef-60fd-47c6-b905-8bd2d37fe055 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 703.875816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "0ccdb6cd-d895-42f9-83fa-007c69ce77f9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 703.876080] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "0ccdb6cd-d895-42f9-83fa-007c69ce77f9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 703.992735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Releasing lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 703.993227] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 703.993422] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 703.993728] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6178c00a-4512-494f-97a9-63cc1b3c5f81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.002848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f637581a-759e-47ab-bf95-0b4d42594561 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.016526] env[62619]: DEBUG nova.scheduler.client.report [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 704.025493] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance de395cf9-2888-4a0d-a1b8-5ce4c36d6182 could not be found. [ 704.025493] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 704.025493] env[62619]: INFO nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Took 0.03 seconds to destroy the instance on the hypervisor. [ 704.025493] env[62619]: DEBUG oslo.service.loopingcall [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 704.025684] env[62619]: DEBUG nova.compute.manager [-] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 704.025684] env[62619]: DEBUG nova.network.neutron [-] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 704.039326] env[62619]: DEBUG nova.network.neutron [-] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 704.302094] env[62619]: ERROR nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. [ 704.302094] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.302094] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.302094] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.302094] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.302094] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.302094] env[62619]: ERROR nova.compute.manager raise self.value [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.302094] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 704.302094] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.302094] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 704.302846] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.302846] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 704.302846] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. 
[ 704.302846] env[62619]: ERROR nova.compute.manager [ 704.302846] env[62619]: Traceback (most recent call last): [ 704.302846] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 704.302846] env[62619]: listener.cb(fileno) [ 704.302846] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.302846] env[62619]: result = function(*args, **kwargs) [ 704.302846] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.302846] env[62619]: return func(*args, **kwargs) [ 704.302846] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.302846] env[62619]: raise e [ 704.302846] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.302846] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 704.302846] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.302846] env[62619]: created_port_ids = self._update_ports_for_instance( [ 704.302846] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.302846] env[62619]: with excutils.save_and_reraise_exception(): [ 704.302846] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.302846] env[62619]: self.force_reraise() [ 704.302846] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.302846] env[62619]: raise self.value [ 704.302846] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.302846] env[62619]: updated_port = self._update_port( [ 704.302846] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.302846] env[62619]: _ensure_no_port_binding_failure(port) [ 704.302846] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.302846] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 704.303806] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. [ 704.303806] env[62619]: Removing descriptor: 18 [ 704.521028] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.900s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 704.521577] env[62619]: ERROR nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. 
[ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Traceback (most recent call last): [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self.driver.spawn(context, instance, image_meta, [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] vm_ref = self.build_virtual_machine(instance, [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.521577] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] for vif in network_info: [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] return self._sync_wrapper(fn, *args, **kwargs) [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self.wait() [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self[:] = self._gt.wait() [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] return self._exit_event.wait() [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] current.throw(*self._exc) [ 704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
704.521984] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] result = function(*args, **kwargs) [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] return func(*args, **kwargs) [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] raise e [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] nwinfo = self.network_api.allocate_for_instance( [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] created_port_ids = self._update_ports_for_instance( [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] with excutils.save_and_reraise_exception(): [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] self.force_reraise() [ 704.522388] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] raise self.value [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] updated_port = self._update_port( [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] _ensure_no_port_binding_failure(port) [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] raise exception.PortBindingFailed(port_id=port['id']) [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] nova.exception.PortBindingFailed: Binding failed for 
port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. [ 704.522759] env[62619]: ERROR nova.compute.manager [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] [ 704.522759] env[62619]: DEBUG nova.compute.utils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 704.523687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.277s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 704.526291] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Build of instance aa89e902-7394-49d5-b6aa-8e9d11548cc5 was re-scheduled: Binding failed for port 35df3843-7749-4475-85b7-7690b6ddf4f3, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 704.527019] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 704.527019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.527145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.527228] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 704.541008] env[62619]: DEBUG nova.network.neutron [-] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 704.640233] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 704.667720] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 704.667961] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 704.668146] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 704.668347] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 704.668489] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 704.668629] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 704.668830] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 704.668984] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 704.669187] env[62619]: DEBUG nova.virt.hardware [None 
req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 704.669357] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 704.669524] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 704.670430] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02b723b-43a0-4900-8267-d024b9d45341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.678237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f279c0cb-c42b-4007-bbb2-ec6d706aa93d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 704.691504] env[62619]: ERROR nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. 
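The nova.virt.hardware lines above pick a guest CPU topology for the m1.nano flavor: with no explicit preferences or limits, every (sockets, cores, threads) combination whose product equals the vCPU count and fits the 65536 maxima is a candidate, and for 1 vCPU that leaves only 1:1:1. An illustrative enumeration of that search (a sketch, not nova.virt.hardware itself):

# Enumerate candidate CPU topologies for a given vCPU count under
# per-dimension maxima.
from dataclasses import dataclass
from itertools import product


@dataclass(frozen=True)
class VirtCPUTopology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                           range(1, min(vcpus, max_cores) + 1),
                           range(1, min(vcpus, max_threads) + 1)):
        if s * c * t == vcpus:
            found.append(VirtCPUTopology(sockets=s, cores=c, threads=t))
    return found


# For 1 vCPU this yields a single topology, matching the
# "Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)]"
# line in the log.
print(possible_topologies(1))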
[ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Traceback (most recent call last): [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] yield resources [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self.driver.spawn(context, instance, image_meta, [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] vm_ref = self.build_virtual_machine(instance, [ 704.691504] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] vif_infos = vmwarevif.get_vif_info(self._session, [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] for vif in network_info: [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] return self._sync_wrapper(fn, *args, **kwargs) [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self.wait() [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self[:] = self._gt.wait() [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] return self._exit_event.wait() [ 704.691963] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 704.691963] env[62619]: ERROR 
nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] current.throw(*self._exc) [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] result = function(*args, **kwargs) [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] return func(*args, **kwargs) [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] raise e [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] nwinfo = self.network_api.allocate_for_instance( [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] created_port_ids = self._update_ports_for_instance( [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] with excutils.save_and_reraise_exception(): [ 704.692538] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self.force_reraise() [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] raise self.value [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] updated_port = self._update_port( [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] _ensure_no_port_binding_failure(port) [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] raise exception.PortBindingFailed(port_id=port['id']) [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. [ 704.692962] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] [ 704.692962] env[62619]: INFO nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Terminating instance [ 704.693749] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 704.693874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 704.694041] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 705.043503] env[62619]: INFO nova.compute.manager [-] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Took 1.02 seconds to deallocate network for instance. [ 705.048925] env[62619]: DEBUG nova.compute.claims [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 705.049134] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 705.166800] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.210199] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.246944] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.299186] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bea55b56-e339-478f-9946-c43758b9ea1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.307307] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83b1781-83b1-4667-96e5-64df735967f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.336642] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba772d0b-e8ea-4472-8eed-398469837318 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.344276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3c2c6ac-157b-4ede-b5cf-da184d50030c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 705.358150] env[62619]: DEBUG nova.compute.provider_tree [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 705.489427] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 705.718833] env[62619]: DEBUG nova.compute.manager [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Received event network-changed-f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 705.719026] env[62619]: DEBUG nova.compute.manager [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Refreshing instance network info cache due to event network-changed-f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 705.719345] env[62619]: DEBUG oslo_concurrency.lockutils [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] Acquiring lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 705.750188] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-aa89e902-7394-49d5-b6aa-8e9d11548cc5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.750424] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 705.750626] env[62619]: DEBUG nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 705.750788] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 705.764328] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 705.861614] env[62619]: DEBUG nova.scheduler.client.report [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 705.992272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 705.992673] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 705.992843] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 705.993230] env[62619]: DEBUG oslo_concurrency.lockutils [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] Acquired lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 705.993414] env[62619]: DEBUG nova.network.neutron [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Refreshing network info cache for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 705.994800] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4cbfd839-f01f-4977-9971-0a7181ee0636 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.004449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-986541f9-1341-4c0c-a410-aeb2db124d7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 706.026704] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 37bb9933-80fa-4a54-82fe-f864a411425f could not be found. 
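The scheduler report-client line above logs the provider inventory that Placement schedules against. As a rough guide, usable capacity per resource class is (total - reserved) * allocation_ratio, with min_unit/max_unit/step_size constraining individual allocations; the helper below recomputes those figures from the logged inventory (a back-of-the-envelope sketch, not Placement's implementation):

# Derive effective schedulable capacity from the inventory record above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16,
             'step_size': 1, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1,
                  'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148,
                'step_size': 1, 'allocation_ratio': 1.0},
}


def capacity(inv):
    return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
            for rc, v in inv.items()}


# -> {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
print(capacity(inventory))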
[ 706.026910] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 706.027098] env[62619]: INFO nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Took 0.03 seconds to destroy the instance on the hypervisor. [ 706.027336] env[62619]: DEBUG oslo.service.loopingcall [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 706.027548] env[62619]: DEBUG nova.compute.manager [-] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 706.027639] env[62619]: DEBUG nova.network.neutron [-] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 706.040518] env[62619]: DEBUG nova.network.neutron [-] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.267022] env[62619]: DEBUG nova.network.neutron [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.366624] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.843s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 706.367197] env[62619]: ERROR nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. 
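The oslo.service line "Waiting for function ... _deallocate_network_with_retries to return" reflects a looping-call retry pattern wrapped around network deallocation. A hedged, self-contained example of that pattern using oslo.service's FixedIntervalLoopingCall; simulated_deallocate is a stand-in helper, not Nova's deallocation code:

# Retry a callable on a fixed interval until it signals completion.
from oslo_service import loopingcall

attempts = {'n': 0}


def simulated_deallocate():
    # Stand-in for the real call to Neutron; succeeds on the second try.
    return attempts['n'] >= 2


def deallocate_with_retries():
    attempts['n'] += 1
    if not simulated_deallocate() and attempts['n'] < 3:
        return  # not done yet: the loop calls us again after `interval`
    # Done (success or retries exhausted): stop the loop and return a value.
    raise loopingcall.LoopingCallDone(retvalue=simulated_deallocate())


timer = loopingcall.FixedIntervalLoopingCall(deallocate_with_retries)
print(timer.start(interval=0.1).wait())  # True after two ticks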
[ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Traceback (most recent call last): [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self.driver.spawn(context, instance, image_meta, [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] vm_ref = self.build_virtual_machine(instance, [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] vif_infos = vmwarevif.get_vif_info(self._session, [ 706.367197] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] for vif in network_info: [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] return self._sync_wrapper(fn, *args, **kwargs) [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self.wait() [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self[:] = self._gt.wait() [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] return self._exit_event.wait() [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] current.throw(*self._exc) [ 706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
706.367770] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] result = function(*args, **kwargs) [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] return func(*args, **kwargs) [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] raise e [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] nwinfo = self.network_api.allocate_for_instance( [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] created_port_ids = self._update_ports_for_instance( [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] with excutils.save_and_reraise_exception(): [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] self.force_reraise() [ 706.368433] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] raise self.value [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] updated_port = self._update_port( [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] _ensure_no_port_binding_failure(port) [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] raise exception.PortBindingFailed(port_id=port['id']) [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] nova.exception.PortBindingFailed: Binding failed for 
port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. [ 706.369049] env[62619]: ERROR nova.compute.manager [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] [ 706.369049] env[62619]: DEBUG nova.compute.utils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 706.369534] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Build of instance 06b595af-8ca9-444a-974c-135bf87a2ec5 was re-scheduled: Binding failed for port 57ea30d5-424d-4a58-8b25-e3a3ba75440a, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 706.369928] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 706.370209] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquiring lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 706.370426] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Acquired lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 706.370595] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 706.371548] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 26.749s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 706.375023] env[62619]: INFO nova.compute.claims [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 706.530662] env[62619]: DEBUG nova.network.neutron [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] 
[instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.543246] env[62619]: DEBUG nova.network.neutron [-] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.641756] env[62619]: DEBUG nova.network.neutron [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 706.769841] env[62619]: INFO nova.compute.manager [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: aa89e902-7394-49d5-b6aa-8e9d11548cc5] Took 1.02 seconds to deallocate network for instance. [ 706.893343] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 706.991479] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 707.046142] env[62619]: INFO nova.compute.manager [-] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Took 1.02 seconds to deallocate network for instance. 
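Every failure in this run traces back to ports whose binding Neutron could not complete. A hedged diagnostic sketch using openstacksdk to list an instance's ports and their binding state; the cloud name "devstack", the attribute names, and the instance UUID are assumptions for illustration, and the definitive explanation lives in the neutron-server logs:

# List the Neutron ports created for an instance and show binding state.
import openstack

conn = openstack.connect(cloud='devstack')
instance_uuid = '37bb9933-80fa-4a54-82fe-f864a411425f'

for port in conn.network.ports(device_id=instance_uuid):
    # A vif_type of "binding_failed" is what makes Nova raise
    # PortBindingFailed; check neutron-server logs for the root cause.
    print(port.id, port.status, port.binding_vif_type, port.binding_host_id)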
[ 707.048206] env[62619]: DEBUG nova.compute.claims [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 707.048390] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 707.145970] env[62619]: DEBUG oslo_concurrency.lockutils [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] Releasing lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.146334] env[62619]: DEBUG nova.compute.manager [req-16961b43-b485-4296-ba44-1ffa96cb312e req-5899ddfb-e076-4ff8-a8bf-db2dfbb4df09 service nova] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Received event network-vif-deleted-f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 707.493741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Releasing lock "refresh_cache-06b595af-8ca9-444a-974c-135bf87a2ec5" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 707.493991] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 707.494213] env[62619]: DEBUG nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 707.494402] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 707.513585] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 707.648943] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b18096a-6323-4087-8551-cb96b358f9a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.657030] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a285aac-912e-457f-898d-d9c847696aa7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.687445] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4543fa5-026b-4d91-8e55-f1161b14c032 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.694558] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fb8112-3b1b-4caf-9228-3b2c2a73ef8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.707225] env[62619]: DEBUG nova.compute.provider_tree [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 707.798689] env[62619]: INFO nova.scheduler.client.report [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted allocations for instance aa89e902-7394-49d5-b6aa-8e9d11548cc5 [ 708.015849] env[62619]: DEBUG nova.network.neutron [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 708.210193] env[62619]: DEBUG nova.scheduler.client.report [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 708.307036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9fe37cf6-53c7-4694-beb7-d90fc264721d tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "aa89e902-7394-49d5-b6aa-8e9d11548cc5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.672s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.518983] env[62619]: INFO nova.compute.manager [None req-395c9200-7c60-4ca6-85a0-f382246522e2 
tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] [instance: 06b595af-8ca9-444a-974c-135bf87a2ec5] Took 1.02 seconds to deallocate network for instance. [ 708.715404] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.344s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 708.715828] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 708.718439] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.211s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 708.719762] env[62619]: INFO nova.compute.claims [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 708.809608] env[62619]: DEBUG nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 709.223683] env[62619]: DEBUG nova.compute.utils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 709.227136] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 709.227136] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 709.331375] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 709.368543] env[62619]: DEBUG nova.policy [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1951cb4058114ac695f1ebc8980135bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62daf278f23642d3b3210ed2bfa85311', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 709.551101] env[62619]: INFO nova.scheduler.client.report [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Deleted allocations for instance 06b595af-8ca9-444a-974c-135bf87a2ec5 [ 709.729517] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 709.771375] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Successfully created port: ef42913a-1667-4c32-8ee2-13c61f14c9ef {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 710.063340] env[62619]: DEBUG oslo_concurrency.lockutils [None req-395c9200-7c60-4ca6-85a0-f382246522e2 tempest-SecurityGroupsTestJSON-1243243120 tempest-SecurityGroupsTestJSON-1243243120-project-member] Lock "06b595af-8ca9-444a-974c-135bf87a2ec5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.822s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 710.082601] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-641331ea-709a-4f85-9006-4eefef3efc3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.092824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d771aa43-b58a-4f38-a063-380a46390385 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.126275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7d8f284-2b54-4e31-b241-7799da855a45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.133647] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c527f765-a31b-4f4e-bfda-f1b74fb2a7f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.146624] env[62619]: DEBUG nova.compute.provider_tree [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.300197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "05a77fe4-172d-4e25-9652-f9dc7cc365ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 710.300502] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "05a77fe4-172d-4e25-9652-f9dc7cc365ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 710.566237] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: 
ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 710.649564] env[62619]: DEBUG nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 710.738860] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 710.765929] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 710.766277] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 710.766444] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.766625] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 710.766771] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 710.766905] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 710.767119] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 710.767273] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 710.767436] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 710.767589] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 710.767891] env[62619]: DEBUG nova.virt.hardware [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.768660] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f99a86e1-172e-43f0-b70b-055f3d7373fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.777027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ea4721-bb14-4088-a494-32ba92e6d032 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.015165] env[62619]: DEBUG nova.compute.manager [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Received event network-changed-ef42913a-1667-4c32-8ee2-13c61f14c9ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 711.015358] env[62619]: DEBUG nova.compute.manager [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Refreshing instance network info cache due to event network-changed-ef42913a-1667-4c32-8ee2-13c61f14c9ef. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 711.015568] env[62619]: DEBUG oslo_concurrency.lockutils [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] Acquiring lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.015704] env[62619]: DEBUG oslo_concurrency.lockutils [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] Acquired lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 711.015852] env[62619]: DEBUG nova.network.neutron [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Refreshing network info cache for port ef42913a-1667-4c32-8ee2-13c61f14c9ef {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 711.090257] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 711.154553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.436s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 711.155317] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 711.161031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 27.148s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 711.192281] env[62619]: ERROR nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. 
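
Every build failure in this section is the same nova.exception.PortBindingFailed, raised once Neutron reports that it could not bind the port. The following is a minimal, hedged Python sketch of that style of check, not an excerpt of the nova.network.neutron code named in the traceback below; the helper name and the 'binding_failed' vif_type value are assumptions for illustration.

    # Hedged sketch: reject a Neutron port whose binding did not succeed.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__("Binding failed for port %s, please check "
                             "neutron logs for more information." % port_id)

    def ensure_port_bound(port):
        # Assumption: Neutron reports a failed binding via the port's
        # 'binding:vif_type' field taking the value 'binding_failed'.
        if port.get("binding:vif_type") == "binding_failed":
            raise PortBindingFailed(port_id=port["id"])

    try:
        ensure_port_bound({"id": "ef42913a-1667-4c32-8ee2-13c61f14c9ef",
                           "binding:vif_type": "binding_failed"})
    except PortBindingFailed as exc:
        print(exc)
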
[ 711.192281] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.192281] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.192281] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.192281] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.192281] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.192281] env[62619]: ERROR nova.compute.manager raise self.value [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.192281] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 711.192281] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.192281] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 711.192739] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.192739] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 711.192739] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. 
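
The "with excutils.save_and_reraise_exception():" frames in the traceback above come from oslo.utils: the context manager lets cleanup run in its body and then re-raises the original exception on exit, so the port error is not masked by the rollback. A self-contained sketch of that pattern follows; bind_port and rollback_ports are hypothetical stand-ins for the Neutron calls, not Nova functions.

    from oslo_utils import excutils

    def bind_port(port_id):
        # Hypothetical stand-in for the Neutron port update that failed above.
        raise RuntimeError("binding failed for %s" % port_id)

    def rollback_ports(created):
        # Hypothetical cleanup for ports that were already created.
        print("rolling back %d port(s)" % len(created))

    def update_ports(port_ids):
        created = []
        for port_id in port_ids:
            try:
                created.append(bind_port(port_id))
            except Exception:
                # The body runs first (cleanup), then the original exception
                # is re-raised on exit, the same shape as the
                # _update_ports_for_instance frames in the traceback.
                with excutils.save_and_reraise_exception():
                    rollback_ports(created)

    try:
        update_ports(["ef42913a-1667-4c32-8ee2-13c61f14c9ef"])
    except RuntimeError as exc:
        print("caller still sees the original error:", exc)
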
[ 711.192739] env[62619]: ERROR nova.compute.manager [ 711.192739] env[62619]: Traceback (most recent call last): [ 711.192739] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 711.192739] env[62619]: listener.cb(fileno) [ 711.192739] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.192739] env[62619]: result = function(*args, **kwargs) [ 711.192739] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.192739] env[62619]: return func(*args, **kwargs) [ 711.192739] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.192739] env[62619]: raise e [ 711.192739] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.192739] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 711.192739] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.192739] env[62619]: created_port_ids = self._update_ports_for_instance( [ 711.192739] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.192739] env[62619]: with excutils.save_and_reraise_exception(): [ 711.192739] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.192739] env[62619]: self.force_reraise() [ 711.192739] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.192739] env[62619]: raise self.value [ 711.192739] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.192739] env[62619]: updated_port = self._update_port( [ 711.192739] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.192739] env[62619]: _ensure_no_port_binding_failure(port) [ 711.192739] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.192739] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 711.193509] env[62619]: nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. [ 711.193509] env[62619]: Removing descriptor: 17 [ 711.193509] env[62619]: ERROR nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. 
[ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Traceback (most recent call last): [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] yield resources [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self.driver.spawn(context, instance, image_meta, [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 711.193509] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] vm_ref = self.build_virtual_machine(instance, [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] vif_infos = vmwarevif.get_vif_info(self._session, [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] for vif in network_info: [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return self._sync_wrapper(fn, *args, **kwargs) [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self.wait() [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self[:] = self._gt.wait() [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return self._exit_event.wait() [ 711.193838] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 711.194173] env[62619]: ERROR 
nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] result = hub.switch() [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return self.greenlet.switch() [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] result = function(*args, **kwargs) [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return func(*args, **kwargs) [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] raise e [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] nwinfo = self.network_api.allocate_for_instance( [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 711.194173] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] created_port_ids = self._update_ports_for_instance( [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] with excutils.save_and_reraise_exception(): [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self.force_reraise() [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] raise self.value [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] updated_port = self._update_port( [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 711.194493] 
env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] _ensure_no_port_binding_failure(port) [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 711.194493] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] raise exception.PortBindingFailed(port_id=port['id']) [ 711.194797] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. [ 711.194797] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] [ 711.194797] env[62619]: INFO nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Terminating instance [ 711.195540] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 711.533388] env[62619]: DEBUG nova.network.neutron [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.650125] env[62619]: DEBUG nova.network.neutron [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.662457] env[62619]: DEBUG nova.compute.utils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 711.667048] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 711.667048] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 711.746890] env[62619]: DEBUG nova.policy [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41faf1fa74be4052a1f29f064ad9c320', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '442aa0d2e85b4120b7ee0b011a3f40ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 711.968116] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d1acfbf-b22f-4f92-88d0-8a066fe2728d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.978019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02579071-4f61-40fa-af45-692422ff8974 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.011470] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4513ed1b-b4b1-4085-b719-804bd3a20ac7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.018813] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d4ec07b-1835-44f7-83ce-9cd04dc8ecaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.031903] env[62619]: DEBUG nova.compute.provider_tree [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 712.153578] env[62619]: DEBUG oslo_concurrency.lockutils [req-77e9836f-b963-4476-8d2d-b49fe9c85fbf req-a3fd3118-a368-4624-97f9-e8e2a553e3cb service nova] Releasing lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 712.153986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 712.154187] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 
tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 712.170020] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 712.534385] env[62619]: DEBUG nova.scheduler.client.report [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 712.663754] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Successfully created port: 8e743676-d778-4222-8f5d-da5b23e50ec7 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 712.684639] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 712.772681] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.039616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.882s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 713.040277] env[62619]: ERROR nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. 
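
The inventory dict that the report client keeps finding unchanged for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 can be read as per-resource-class capacity. As an illustrative assumption, effective schedulable capacity is taken here as (total - reserved) * allocation_ratio; re-computing it from the exact numbers in the log:

    # Effective capacity per resource class, using the inventory reported above.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(rc, capacity)
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0 -- the headroom that the
    # "Claim successful on node domain-c8..." messages are checked against.
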
[ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Traceback (most recent call last): [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self.driver.spawn(context, instance, image_meta, [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self._vmops.spawn(context, instance, image_meta, injected_files, [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] vm_ref = self.build_virtual_machine(instance, [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] vif_infos = vmwarevif.get_vif_info(self._session, [ 713.040277] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] for vif in network_info: [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] return self._sync_wrapper(fn, *args, **kwargs) [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self.wait() [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self[:] = self._gt.wait() [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] return self._exit_event.wait() [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] current.throw(*self._exc) [ 713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
713.040638] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] result = function(*args, **kwargs) [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] return func(*args, **kwargs) [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] raise e [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] nwinfo = self.network_api.allocate_for_instance( [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] created_port_ids = self._update_ports_for_instance( [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] with excutils.save_and_reraise_exception(): [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] self.force_reraise() [ 713.041023] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] raise self.value [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] updated_port = self._update_port( [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] _ensure_no_port_binding_failure(port) [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] raise exception.PortBindingFailed(port_id=port['id']) [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] nova.exception.PortBindingFailed: Binding failed for 
port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. [ 713.041404] env[62619]: ERROR nova.compute.manager [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] [ 713.041404] env[62619]: DEBUG nova.compute.utils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 713.042720] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 27.232s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 713.044146] env[62619]: INFO nova.compute.claims [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 713.048244] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Build of instance 709ed215-d501-409a-ab80-6c4b844d24e6 was re-scheduled: Binding failed for port f8db1889-6665-4e35-9af5-868daca48f8c, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 713.048663] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 713.048885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquiring lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 713.049041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Acquired lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 713.049217] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 713.053258] env[62619]: DEBUG nova.compute.manager [req-5d0f653e-da7a-45b6-b1f8-2f41f78728fc req-903af1d2-daba-441a-a847-3fb44b6992ab service nova] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Received event network-vif-deleted-ef42913a-1667-4c32-8ee2-13c61f14c9ef {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 713.177955] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 713.218950] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 713.218950] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 713.218950] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 713.219506] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 713.219506] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 713.219506] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 713.219506] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 713.219697] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 713.219781] 
env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 713.219939] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 713.220123] env[62619]: DEBUG nova.virt.hardware [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 713.221281] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ee324a4-6674-47b3-8ccf-3888b7e07009 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.229149] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239fb672-191c-49ef-83d2-6a1b952c1a17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.275803] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 713.279018] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 713.279018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 713.279018] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a02549f4-a963-4b61-986d-f1db7718f520 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.284983] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02eb2cbc-5397-4faf-be49-8def1cd18dfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 713.306263] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d02f8bcb-c754-4308-9c90-260624010cb0 could not be found. [ 713.306487] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 713.306672] env[62619]: INFO nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 713.306917] env[62619]: DEBUG oslo.service.loopingcall [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 713.307153] env[62619]: DEBUG nova.compute.manager [-] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 713.307250] env[62619]: DEBUG nova.network.neutron [-] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 713.321935] env[62619]: DEBUG nova.network.neutron [-] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.578768] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 713.691194] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 713.824924] env[62619]: DEBUG nova.network.neutron [-] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.165809] env[62619]: ERROR nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. [ 714.165809] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.165809] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.165809] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.165809] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.165809] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.165809] env[62619]: ERROR nova.compute.manager raise self.value [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.165809] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 714.165809] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.165809] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 714.166547] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.166547] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 714.166547] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. 
[ 714.166547] env[62619]: ERROR nova.compute.manager [ 714.166826] env[62619]: Traceback (most recent call last): [ 714.166886] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 714.166886] env[62619]: listener.cb(fileno) [ 714.166886] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.166886] env[62619]: result = function(*args, **kwargs) [ 714.166886] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.166886] env[62619]: return func(*args, **kwargs) [ 714.166886] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.166886] env[62619]: raise e [ 714.166886] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.166886] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 714.166886] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.166886] env[62619]: created_port_ids = self._update_ports_for_instance( [ 714.166886] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.166886] env[62619]: with excutils.save_and_reraise_exception(): [ 714.166886] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.166886] env[62619]: self.force_reraise() [ 714.166886] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.166886] env[62619]: raise self.value [ 714.166886] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.166886] env[62619]: updated_port = self._update_port( [ 714.166886] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.166886] env[62619]: _ensure_no_port_binding_failure(port) [ 714.166886] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.166886] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 714.166886] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. [ 714.166886] env[62619]: Removing descriptor: 17 [ 714.170242] env[62619]: ERROR nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. 
[ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Traceback (most recent call last): [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] yield resources [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self.driver.spawn(context, instance, image_meta, [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] vm_ref = self.build_virtual_machine(instance, [ 714.170242] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] for vif in network_info: [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return self._sync_wrapper(fn, *args, **kwargs) [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self.wait() [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self[:] = self._gt.wait() [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return self._exit_event.wait() [ 714.170862] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 714.170862] env[62619]: ERROR 
nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] result = hub.switch() [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return self.greenlet.switch() [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] result = function(*args, **kwargs) [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return func(*args, **kwargs) [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] raise e [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] nwinfo = self.network_api.allocate_for_instance( [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] created_port_ids = self._update_ports_for_instance( [ 714.171416] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] with excutils.save_and_reraise_exception(): [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self.force_reraise() [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] raise self.value [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] updated_port = self._update_port( [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 714.171972] 
env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] _ensure_no_port_binding_failure(port) [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] raise exception.PortBindingFailed(port_id=port['id']) [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. [ 714.171972] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] [ 714.173109] env[62619]: INFO nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Terminating instance [ 714.173157] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 714.173346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquired lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 714.173506] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 714.194531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Releasing lock "refresh_cache-709ed215-d501-409a-ab80-6c4b844d24e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 714.194717] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 714.194887] env[62619]: DEBUG nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 714.195197] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 714.212614] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.330464] env[62619]: INFO nova.compute.manager [-] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Took 1.02 seconds to deallocate network for instance. [ 714.332859] env[62619]: DEBUG nova.compute.claims [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 714.333120] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 714.374067] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfa9ce9-7d99-4580-b942-e3420708f349 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.381700] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbd2997e-30e7-4503-9b51-5270de0611f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.411848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d3301ea-b0c6-439a-8182-ad05d2bf61fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.419230] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa2b9e6-ab2b-4f29-86b6-d567da66ef54 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 714.432175] env[62619]: DEBUG nova.compute.provider_tree [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 714.691396] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 714.715643] env[62619]: DEBUG nova.network.neutron [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.763557] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 714.935379] env[62619]: DEBUG nova.scheduler.client.report [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 715.069701] env[62619]: DEBUG nova.compute.manager [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Received event network-changed-8e743676-d778-4222-8f5d-da5b23e50ec7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 715.069909] env[62619]: DEBUG nova.compute.manager [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Refreshing instance network info cache due to event network-changed-8e743676-d778-4222-8f5d-da5b23e50ec7. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 715.070626] env[62619]: DEBUG oslo_concurrency.lockutils [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] Acquiring lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 715.219584] env[62619]: INFO nova.compute.manager [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] [instance: 709ed215-d501-409a-ab80-6c4b844d24e6] Took 1.02 seconds to deallocate network for instance. 
[ 715.269017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Releasing lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 715.269017] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 715.269017] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 715.269017] env[62619]: DEBUG oslo_concurrency.lockutils [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] Acquired lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 715.269017] env[62619]: DEBUG nova.network.neutron [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Refreshing network info cache for port 8e743676-d778-4222-8f5d-da5b23e50ec7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 715.269237] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-41e055a2-4391-47b5-81b1-db0495f820cb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.282948] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba11325b-36c8-4c14-9ab0-4deabb50be26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 715.305532] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 25df5673-9633-40de-8e72-a8620f19a6f9 could not be found. [ 715.306044] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 715.309105] env[62619]: INFO nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 715.309105] env[62619]: DEBUG oslo.service.loopingcall [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 715.309105] env[62619]: DEBUG nova.compute.manager [-] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 715.309105] env[62619]: DEBUG nova.network.neutron [-] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 715.437606] env[62619]: DEBUG nova.network.neutron [-] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.440319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 715.440625] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 715.442955] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.883s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 715.812472] env[62619]: DEBUG nova.network.neutron [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 715.940850] env[62619]: DEBUG nova.network.neutron [-] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 715.947806] env[62619]: DEBUG nova.compute.utils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 715.952076] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 715.952255] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 715.956937] env[62619]: DEBUG nova.network.neutron [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 716.013728] env[62619]: DEBUG nova.policy [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '41faf1fa74be4052a1f29f064ad9c320', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '442aa0d2e85b4120b7ee0b011a3f40ec', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 716.248143] env[62619]: INFO nova.scheduler.client.report [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Deleted allocations for instance 709ed215-d501-409a-ab80-6c4b844d24e6 [ 716.259500] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Successfully created port: f1bd94e6-9fe8-40f6-993a-29348d3d4d43 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 716.267716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa17dda3-778f-4fc6-8458-f06704892a40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.276176] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-84060fb1-fd77-475e-be97-5fc3dbed0003 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.308300] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-913c7919-8e25-418b-8fc3-27160cf8d8ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.317855] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b42008dd-aa6b-40a9-a2bd-dea6a7b090bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 716.335333] env[62619]: DEBUG nova.compute.provider_tree [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 716.443936] env[62619]: INFO nova.compute.manager [-] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Took 1.14 seconds to deallocate network for instance. [ 716.446550] env[62619]: DEBUG nova.compute.claims [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 716.446737] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 716.452401] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 716.458676] env[62619]: DEBUG oslo_concurrency.lockutils [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] Releasing lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 716.459023] env[62619]: DEBUG nova.compute.manager [req-6374ea0b-51c1-452e-8ae1-bd17670ee8af req-453fc753-5f4e-4edf-be11-60fc57c8e8e1 service nova] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Received event network-vif-deleted-8e743676-d778-4222-8f5d-da5b23e50ec7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 716.757805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e024a449-8b7e-4885-a60e-2450b5d19c9e tempest-ServerActionsTestOtherB-1805751122 tempest-ServerActionsTestOtherB-1805751122-project-member] Lock "709ed215-d501-409a-ab80-6c4b844d24e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 116.208s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 716.841171] env[62619]: DEBUG nova.scheduler.client.report [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 717.153196] env[62619]: DEBUG nova.compute.manager [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Received event network-changed-f1bd94e6-9fe8-40f6-993a-29348d3d4d43 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 717.153424] env[62619]: DEBUG nova.compute.manager [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Refreshing instance network info cache due to event network-changed-f1bd94e6-9fe8-40f6-993a-29348d3d4d43. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 717.154026] env[62619]: DEBUG oslo_concurrency.lockutils [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] Acquiring lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.154026] env[62619]: DEBUG oslo_concurrency.lockutils [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] Acquired lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.154026] env[62619]: DEBUG nova.network.neutron [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Refreshing network info cache for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 717.185892] env[62619]: ERROR nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. [ 717.185892] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.185892] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.185892] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.185892] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.185892] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.185892] env[62619]: ERROR nova.compute.manager raise self.value [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 717.185892] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 717.185892] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.185892] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 717.186363] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.186363] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 717.186363] env[62619]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. [ 717.186363] env[62619]: ERROR nova.compute.manager [ 717.186363] env[62619]: Traceback (most recent call last): [ 717.186363] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 717.186363] env[62619]: listener.cb(fileno) [ 717.186363] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.186363] env[62619]: result = function(*args, **kwargs) [ 717.186363] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.186363] env[62619]: return func(*args, **kwargs) [ 717.186363] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.186363] env[62619]: raise e [ 717.186363] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.186363] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 717.186363] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.186363] env[62619]: created_port_ids = self._update_ports_for_instance( [ 717.186363] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.186363] env[62619]: with excutils.save_and_reraise_exception(): [ 717.186363] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.186363] env[62619]: self.force_reraise() [ 717.186363] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.186363] env[62619]: raise self.value [ 717.186363] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 717.186363] env[62619]: updated_port = self._update_port( [ 717.186363] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.186363] env[62619]: _ensure_no_port_binding_failure(port) [ 717.186363] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.186363] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 717.187172] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. [ 717.187172] env[62619]: Removing descriptor: 18 [ 717.263111] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 717.344860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.902s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 717.345511] env[62619]: ERROR nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Traceback (most recent call last): [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self.driver.spawn(context, instance, image_meta, [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] vm_ref = self.build_virtual_machine(instance, [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.345511] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] for vif in network_info: [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] return self._sync_wrapper(fn, *args, **kwargs) [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self.wait() [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self[:] = self._gt.wait() [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] return self._exit_event.wait() [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] current.throw(*self._exc) [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.345854] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] result = function(*args, **kwargs) [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] return func(*args, **kwargs) [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] raise e [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] nwinfo = self.network_api.allocate_for_instance( [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] created_port_ids = self._update_ports_for_instance( [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] with excutils.save_and_reraise_exception(): [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] self.force_reraise() [ 717.347248] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] raise self.value [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 
1340, in _update_ports_for_instance [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] updated_port = self._update_port( [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] _ensure_no_port_binding_failure(port) [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] raise exception.PortBindingFailed(port_id=port['id']) [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] nova.exception.PortBindingFailed: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. [ 717.347578] env[62619]: ERROR nova.compute.manager [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] [ 717.347578] env[62619]: DEBUG nova.compute.utils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 717.347843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.886s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 717.349009] env[62619]: INFO nova.compute.claims [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 717.352616] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Build of instance 7d8ae6c2-1453-4d61-a2b5-311a557087de was re-scheduled: Binding failed for port d3a780b5-e376-4201-8f67-6b386ac68f31, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 717.352616] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 717.352616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquiring lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.352616] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Acquired lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 717.352833] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 717.463141] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 717.490427] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 717.490665] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 717.490848] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 717.491058] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 717.491222] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 717.491408] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 717.491601] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 717.491789] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 717.491963] 
env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 717.492162] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 717.492730] env[62619]: DEBUG nova.virt.hardware [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 717.493199] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-799e6be2-5d4c-4d99-91cf-78c92108019a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.501736] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bb854fd-16e5-4a3d-b676-27e1ce195d9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 717.515800] env[62619]: ERROR nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. 
[ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Traceback (most recent call last): [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] yield resources [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self.driver.spawn(context, instance, image_meta, [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] vm_ref = self.build_virtual_machine(instance, [ 717.515800] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] vif_infos = vmwarevif.get_vif_info(self._session, [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] for vif in network_info: [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] return self._sync_wrapper(fn, *args, **kwargs) [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self.wait() [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self[:] = self._gt.wait() [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] return self._exit_event.wait() [ 717.516282] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 717.516282] env[62619]: ERROR 
nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] current.throw(*self._exc) [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] result = function(*args, **kwargs) [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] return func(*args, **kwargs) [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] raise e [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] nwinfo = self.network_api.allocate_for_instance( [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] created_port_ids = self._update_ports_for_instance( [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] with excutils.save_and_reraise_exception(): [ 717.516908] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self.force_reraise() [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] raise self.value [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] updated_port = self._update_port( [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] _ensure_no_port_binding_failure(port) [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] raise exception.PortBindingFailed(port_id=port['id']) [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. [ 717.517288] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] [ 717.517288] env[62619]: INFO nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Terminating instance [ 717.518651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 717.689743] env[62619]: DEBUG nova.network.neutron [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 717.788271] env[62619]: DEBUG nova.network.neutron [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 717.803893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 717.883944] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.017842] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 718.289100] env[62619]: DEBUG oslo_concurrency.lockutils [req-e9558e9c-3338-49b5-826a-3d2294669c41 req-a15ee67f-a22e-4e6e-8a09-d19d65593722 service nova] Releasing lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.289726] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquired lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 718.289915] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 718.522938] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Releasing lock "refresh_cache-7d8ae6c2-1453-4d61-a2b5-311a557087de" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 718.523197] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 718.523387] env[62619]: DEBUG nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 718.523596] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 718.544487] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.653726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f114a636-a252-414c-a8ea-b185aceeb382 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.660169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a897a1-dd87-4776-b80c-678a7f2ce28b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.690802] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6d175e1-13fc-4fdc-bd74-0111e243b6af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.698690] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe961717-04c4-441a-93e1-463645b04b11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 718.712833] env[62619]: DEBUG nova.compute.provider_tree [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 718.817203] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 718.916134] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.048634] env[62619]: DEBUG nova.network.neutron [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 719.218729] env[62619]: DEBUG nova.scheduler.client.report [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 719.230501] env[62619]: DEBUG nova.compute.manager [req-364e0f67-cf92-4a5f-889e-241e22faed95 req-f018d3d6-a9d5-496c-96d2-91f1bd7638be service nova] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Received event network-vif-deleted-f1bd94e6-9fe8-40f6-993a-29348d3d4d43 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 719.418376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Releasing lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 719.418922] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 719.419182] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 719.419612] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-69b3ea6d-2f1a-4448-8cb7-0c3c49d51b6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.434047] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d94eb98c-1494-46c4-ae41-a75893a25d17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 719.453409] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d92cd356-0e29-429d-9216-b376e91e0fe8 could not be found. [ 719.453636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 719.453814] env[62619]: INFO nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Took 0.03 seconds to destroy the instance on the hypervisor. [ 719.454071] env[62619]: DEBUG oslo.service.loopingcall [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 719.454295] env[62619]: DEBUG nova.compute.manager [-] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 719.454392] env[62619]: DEBUG nova.network.neutron [-] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 719.478808] env[62619]: DEBUG nova.network.neutron [-] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 719.551477] env[62619]: INFO nova.compute.manager [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] [instance: 7d8ae6c2-1453-4d61-a2b5-311a557087de] Took 1.03 seconds to deallocate network for instance. 
[ 719.553505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquiring lock "cc6bdf77-2540-47e3-aed2-cb0c73b329cc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 719.553732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Lock "cc6bdf77-2540-47e3-aed2-cb0c73b329cc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.725161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 719.725161] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 719.728090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.679s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 719.982207] env[62619]: DEBUG nova.network.neutron [-] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 720.232974] env[62619]: DEBUG nova.compute.utils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 720.238162] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 720.238210] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 720.297828] env[62619]: DEBUG nova.policy [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ede8684315fb4a9eaaf948fee505d61d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3395707c4d1149cdb048222f65f42428', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 720.486666] env[62619]: INFO nova.compute.manager [-] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Took 1.03 seconds to deallocate network for instance. [ 720.491016] env[62619]: DEBUG nova.compute.claims [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 720.491016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 720.537567] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc91e8a6-6839-46f2-972a-78e126b5c232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.545516] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4c28f1-510c-4bde-9781-c5deb6e261db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.578620] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eccfdbe-3aac-4710-aad8-a6953f44bb24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.585965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c13a97-fa3a-4239-8595-17e4fe54038c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 720.598883] env[62619]: DEBUG nova.compute.provider_tree [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 720.600707] env[62619]: INFO nova.scheduler.client.report [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Deleted allocations for instance 7d8ae6c2-1453-4d61-a2b5-311a557087de [ 720.625627] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Successfully created port: 646f4a20-7d6f-4b45-bd0a-cc8c10a34513 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 720.742221] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 721.106914] env[62619]: DEBUG nova.scheduler.client.report [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 721.110309] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7488dd81-db18-4611-a23b-3177ec94c3cc tempest-ServerMetadataNegativeTestJSON-1174933449 tempest-ServerMetadataNegativeTestJSON-1174933449-project-member] Lock "7d8ae6c2-1453-4d61-a2b5-311a557087de" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 117.146s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.479445] env[62619]: ERROR nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. 
[ 721.479445] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.479445] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 721.479445] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 721.479445] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.479445] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.479445] env[62619]: ERROR nova.compute.manager raise self.value [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 721.479445] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 721.479445] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.479445] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 721.479929] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.479929] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 721.479929] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. 
[ 721.479929] env[62619]: ERROR nova.compute.manager [ 721.479929] env[62619]: Traceback (most recent call last): [ 721.479929] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 721.479929] env[62619]: listener.cb(fileno) [ 721.479929] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.479929] env[62619]: result = function(*args, **kwargs) [ 721.479929] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.479929] env[62619]: return func(*args, **kwargs) [ 721.479929] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 721.479929] env[62619]: raise e [ 721.479929] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.479929] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 721.479929] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 721.479929] env[62619]: created_port_ids = self._update_ports_for_instance( [ 721.479929] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 721.479929] env[62619]: with excutils.save_and_reraise_exception(): [ 721.479929] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.479929] env[62619]: self.force_reraise() [ 721.479929] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.479929] env[62619]: raise self.value [ 721.479929] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 721.479929] env[62619]: updated_port = self._update_port( [ 721.479929] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.479929] env[62619]: _ensure_no_port_binding_failure(port) [ 721.479929] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.479929] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 721.480771] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. [ 721.480771] env[62619]: Removing descriptor: 18 [ 721.614019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.884s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 721.614019] env[62619]: ERROR nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. 
[ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Traceback (most recent call last): [ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self.driver.spawn(context, instance, image_meta, [ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.614019] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] vm_ref = self.build_virtual_machine(instance, [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] for vif in network_info: [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] return self._sync_wrapper(fn, *args, **kwargs) [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self.wait() [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self[:] = self._gt.wait() [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] return self._exit_event.wait() [ 721.614411] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] current.throw(*self._exc) [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] result = function(*args, **kwargs) [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] return func(*args, **kwargs) [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] raise e [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] nwinfo = self.network_api.allocate_for_instance( [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] created_port_ids = self._update_ports_for_instance( [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 721.614801] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] with excutils.save_and_reraise_exception(): [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] self.force_reraise() [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] raise self.value [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] updated_port = self._update_port( [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] _ensure_no_port_binding_failure(port) [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] raise exception.PortBindingFailed(port_id=port['id']) [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] nova.exception.PortBindingFailed: Binding failed for 
port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. [ 721.615190] env[62619]: ERROR nova.compute.manager [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] [ 721.615542] env[62619]: DEBUG nova.compute.utils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 721.619023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.568s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 721.619804] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 721.622236] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Build of instance de395cf9-2888-4a0d-a1b8-5ce4c36d6182 was re-scheduled: Binding failed for port 5b7e5bef-60fd-47c6-b905-8bd2d37fe055, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 721.622789] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 721.623124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquiring lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.623382] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Acquired lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.623637] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 721.668359] env[62619]: DEBUG nova.compute.manager [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Received event network-changed-646f4a20-7d6f-4b45-bd0a-cc8c10a34513 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 721.668580] env[62619]: DEBUG nova.compute.manager [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Refreshing instance network info cache due to event network-changed-646f4a20-7d6f-4b45-bd0a-cc8c10a34513. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 721.668791] env[62619]: DEBUG oslo_concurrency.lockutils [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] Acquiring lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 721.668930] env[62619]: DEBUG oslo_concurrency.lockutils [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] Acquired lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 721.669139] env[62619]: DEBUG nova.network.neutron [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Refreshing network info cache for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 721.751513] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 721.775544] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 721.775777] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 721.775923] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 721.776111] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 721.776252] env[62619]: DEBUG nova.virt.hardware [None 
req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 721.776391] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 721.776588] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 721.776741] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 721.776899] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 721.777069] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 721.777238] env[62619]: DEBUG nova.virt.hardware [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 721.778117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd0ff684-ea83-4bbd-bcff-e6ddccd6c0b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.785950] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fb65af9-5491-414b-bc8e-715a95437ebf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 721.802909] env[62619]: ERROR nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. 
[ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Traceback (most recent call last): [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] yield resources [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self.driver.spawn(context, instance, image_meta, [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self._vmops.spawn(context, instance, image_meta, injected_files, [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] vm_ref = self.build_virtual_machine(instance, [ 721.802909] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] vif_infos = vmwarevif.get_vif_info(self._session, [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] for vif in network_info: [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] return self._sync_wrapper(fn, *args, **kwargs) [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self.wait() [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self[:] = self._gt.wait() [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] return self._exit_event.wait() [ 721.803272] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 721.803272] env[62619]: ERROR 
nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] current.throw(*self._exc) [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] result = function(*args, **kwargs) [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] return func(*args, **kwargs) [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] raise e [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] nwinfo = self.network_api.allocate_for_instance( [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] created_port_ids = self._update_ports_for_instance( [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] with excutils.save_and_reraise_exception(): [ 721.803595] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self.force_reraise() [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] raise self.value [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] updated_port = self._update_port( [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] _ensure_no_port_binding_failure(port) [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] raise exception.PortBindingFailed(port_id=port['id']) [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. [ 721.803926] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] [ 721.803926] env[62619]: INFO nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Terminating instance [ 721.804261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquiring lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 722.151101] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 722.152634] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.193757] env[62619]: DEBUG nova.network.neutron [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 722.288642] env[62619]: DEBUG nova.network.neutron [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.321291] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 722.438762] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cb56cef-166d-4a45-b254-7f44393c79b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.449631] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2208cd6f-4b02-4084-88c3-a3f38f42a775 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.483355] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-805c2c4f-ee58-4201-a9ff-7d314e344512 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.491784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48d05f1d-17a1-4553-b57c-db640bf99f11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 722.504455] env[62619]: DEBUG nova.compute.provider_tree [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 722.792433] env[62619]: DEBUG oslo_concurrency.lockutils [req-6123e879-1fdc-473b-aa21-7cfac4cac8b8 req-48f7056d-28ca-4877-88c8-fc4e2a42f26a service nova] Releasing lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.792433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquired lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 722.792433] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 722.822668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 
tempest-ServerActionsTestJSON-893871560-project-member] Releasing lock "refresh_cache-de395cf9-2888-4a0d-a1b8-5ce4c36d6182" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 722.822920] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 722.823119] env[62619]: DEBUG nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 722.823282] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 722.839328] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.009589] env[62619]: DEBUG nova.scheduler.client.report [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 723.311614] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 723.341730] env[62619]: DEBUG nova.network.neutron [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.413052] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 723.514130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.897s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 723.514779] env[62619]: ERROR nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Traceback (most recent call last): [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self.driver.spawn(context, instance, image_meta, [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] vm_ref = self.build_virtual_machine(instance, [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] vif_infos = vmwarevif.get_vif_info(self._session, [ 723.514779] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] for vif in network_info: [ 723.515114] env[62619]: ERROR 
nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] return self._sync_wrapper(fn, *args, **kwargs) [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self.wait() [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self[:] = self._gt.wait() [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] return self._exit_event.wait() [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] current.throw(*self._exc) [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 723.515114] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] result = function(*args, **kwargs) [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] return func(*args, **kwargs) [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] raise e [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] nwinfo = self.network_api.allocate_for_instance( [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] created_port_ids = self._update_ports_for_instance( [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] with excutils.save_and_reraise_exception(): [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 
37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] self.force_reraise() [ 723.515532] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] raise self.value [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] updated_port = self._update_port( [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] _ensure_no_port_binding_failure(port) [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] raise exception.PortBindingFailed(port_id=port['id']) [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] nova.exception.PortBindingFailed: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. [ 723.515919] env[62619]: ERROR nova.compute.manager [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] [ 723.515919] env[62619]: DEBUG nova.compute.utils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. 
{{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 723.516756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.186s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 723.518179] env[62619]: INFO nova.compute.claims [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 723.520709] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Build of instance 37bb9933-80fa-4a54-82fe-f864a411425f was re-scheduled: Binding failed for port f5e6ea78-c085-4c39-a6ee-0692f3cd3b1a, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 723.521131] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 723.521360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 723.521513] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 723.521679] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 723.732905] env[62619]: DEBUG nova.compute.manager [req-7b89782a-b321-41ad-9808-72da12bf4cd1 req-45b6df37-da08-42c9-9427-f5e462b33075 service nova] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Received event network-vif-deleted-646f4a20-7d6f-4b45-bd0a-cc8c10a34513 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 723.845195] env[62619]: INFO nova.compute.manager [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] [instance: de395cf9-2888-4a0d-a1b8-5ce4c36d6182] Took 1.02 seconds to deallocate network for instance. 
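Both tracebacks above terminate in _ensure_no_port_binding_failure() (nova/network/neutron.py:294), which raises PortBindingFailed for the port Neutron handed back. A minimal sketch of that check follows, assuming the failure is signalled through the port's binding:vif_type field; the log only confirms the function name and the exception it raises, so the exact condition is an assumption, not Nova's verbatim code.

    from nova import exception  # PortBindingFailed is defined here (see the traceback above)

    def _ensure_no_port_binding_failure(port):
        # Sketch: Neutron marks a port whose binding could not be completed
        # with a special vif_type; treating that as fatal for the build is
        # what produces the "Binding failed for port ..." entries above.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])

When this exception escapes _build_and_run_instance, the compute manager aborts the resource claim and re-schedules the build, which matches the "was re-scheduled: Binding failed for port ..." entries in this log.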
[ 723.916034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Releasing lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 723.916496] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 723.916734] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 723.917114] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-eaef7b79-ad44-4b3f-a94c-5bcf195f31f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.926142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a435d1e2-0deb-4e15-a690-9ee22257bd4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 723.947049] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ffae1b5d-83fc-4007-be0f-c6f1e285f824 could not be found. [ 723.947261] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 723.947438] env[62619]: INFO nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Took 0.03 seconds to destroy the instance on the hypervisor. [ 723.947689] env[62619]: DEBUG oslo.service.loopingcall [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 723.947909] env[62619]: DEBUG nova.compute.manager [-] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 723.947998] env[62619]: DEBUG nova.network.neutron [-] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 723.964909] env[62619]: DEBUG nova.network.neutron [-] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.041480] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.108230] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.466904] env[62619]: DEBUG nova.network.neutron [-] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 724.612714] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-37bb9933-80fa-4a54-82fe-f864a411425f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 724.612957] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 724.613155] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 724.613317] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 724.627727] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 724.779272] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f321288-a8ab-40eb-981e-98c19e8d393a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.787410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9100ee25-7377-4b3d-acf7-7a62d8515e8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.815965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-326c302d-fcf7-4767-bff1-aae3f7cd7e5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.823450] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a271fbab-55ad-4904-979c-7f52dc955e87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 724.835965] env[62619]: DEBUG nova.compute.provider_tree [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 724.872889] env[62619]: INFO nova.scheduler.client.report [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Deleted allocations for instance de395cf9-2888-4a0d-a1b8-5ce4c36d6182 [ 724.969301] env[62619]: INFO nova.compute.manager [-] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Took 1.02 seconds to deallocate network for instance. 
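The repeated "Acquiring lock" / "acquired by ... waited" / "released by ... held" triplets come from oslo_concurrency.lockutils, which wraps each critical section (here the resource tracker's "compute_resources" lock) and reports how long a caller queued and how long it held the lock. A minimal sketch of the same pattern, assuming a plain in-process named lock with no external lock file; Nova's own wrappers differ in detail.

    from oslo_concurrency import lockutils

    # Serialize resource-tracker updates on one named lock, as the
    # instance_claim / abort_instance_claim entries above do.
    @lockutils.synchronized('compute_resources')
    def instance_claim(instance):
        ...  # adjust claimed CPU/RAM/disk while holding the lock

    # The same lock can also be taken explicitly as a context manager:
    with lockutils.lock('compute_resources'):
        pass  # e.g. abort_instance_claim() runs in here

The long "waited" values in the log (for example 14.186s) simply mean another build held the same lock for that long before this caller entered the section.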
[ 724.971898] env[62619]: DEBUG nova.compute.claims [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 724.972095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 725.131391] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 725.339266] env[62619]: DEBUG nova.scheduler.client.report [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 725.380275] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f842f683-de3d-4ca5-a7be-922d54ebe8c2 tempest-ServerActionsTestJSON-893871560 tempest-ServerActionsTestJSON-893871560-project-member] Lock "de395cf9-2888-4a0d-a1b8-5ce4c36d6182" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.373s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.634139] env[62619]: INFO nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 37bb9933-80fa-4a54-82fe-f864a411425f] Took 1.02 seconds to deallocate network for instance. [ 725.844124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 725.844682] env[62619]: DEBUG nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 725.847197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.757s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 725.848754] env[62619]: INFO nova.compute.claims [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 725.883023] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 726.353593] env[62619]: DEBUG nova.compute.utils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 726.356732] env[62619]: DEBUG nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 726.401301] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 726.664812] env[62619]: INFO nova.scheduler.client.report [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Deleted allocations for instance 37bb9933-80fa-4a54-82fe-f864a411425f [ 726.859084] env[62619]: DEBUG nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 727.129024] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c423cc7c-0fe4-4ff0-ace4-ce951f3f46b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.136075] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29fa4079-f208-436d-98ad-8aaf3f0316a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.166341] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddd2f56-d353-403f-be18-774cb48937d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.172578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "37bb9933-80fa-4a54-82fe-f864a411425f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 118.301s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 727.175751] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-224a4930-6dfb-426b-b338-251af40a9f31 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.191496] env[62619]: DEBUG nova.compute.provider_tree [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 727.675670] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 727.694684] env[62619]: DEBUG nova.scheduler.client.report [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 727.870286] env[62619]: DEBUG nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 727.896439] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 727.896684] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 727.896835] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 727.897020] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 727.897168] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 727.897310] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 727.897510] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 727.897664] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
727.897819] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 727.897974] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 727.898161] env[62619]: DEBUG nova.virt.hardware [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 727.899037] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9276634-6d15-4feb-b799-221c41fbf1bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.906967] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62556a74-e306-41be-8197-97c363c2eea7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.919754] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 727.925182] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Creating folder: Project (02fb0946a7b4476b937ea2877176e838). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.925484] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ef940670-ed4f-4776-a587-e1cf3543a93f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.934862] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Created folder: Project (02fb0946a7b4476b937ea2877176e838) in parent group-v290436. [ 727.935041] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Creating folder: Instances. Parent ref: group-v290453. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 727.935253] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9ad784e4-4092-4036-a5bf-841cc710b446 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.944184] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Created folder: Instances in parent group-v290453. [ 727.944433] env[62619]: DEBUG oslo.service.loopingcall [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 727.944630] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 727.944863] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a641dcd8-7852-4076-aaba-9009eb481045 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 727.960748] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 727.960748] env[62619]: value = "task-1364858" [ 727.960748] env[62619]: _type = "Task" [ 727.960748] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 727.967948] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364858, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.202608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.355s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 728.203026] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 728.206660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 728.206936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.874s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 728.470569] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364858, 'name': CreateVM_Task, 'duration_secs': 0.244473} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.470775] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 728.471260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.471471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.471900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 728.472181] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c466141d-6d12-42f6-8525-e9566e91ef95 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 728.476835] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 728.476835] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52772fd4-7cc0-e27f-416e-887de6a4c376" [ 728.476835] env[62619]: _type = "Task" [ 728.476835] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 728.484137] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52772fd4-7cc0-e27f-416e-887de6a4c376, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 728.717726] env[62619]: DEBUG nova.compute.utils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 728.717726] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 728.717726] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 728.804727] env[62619]: DEBUG nova.policy [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1c1e4f5897914808ac77b1d25ae9c529', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '700fdfa7078c46a5a8c617466d719183', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 728.987116] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52772fd4-7cc0-e27f-416e-887de6a4c376, 'name': SearchDatastore_Task, 'duration_secs': 0.011526} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 728.990930] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 728.991373] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 728.991771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 728.992085] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 728.994018] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 728.994018] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7a4fa1ec-02bd-4f2d-92ef-08a847f6d3dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.003521] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 729.003521] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 729.003521] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-acbe80ab-6cdc-456d-b786-d7e6c26d448f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.010764] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 729.010764] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522879aa-2160-a588-82a8-142040bc63bc" [ 729.010764] env[62619]: _type = "Task" [ 729.010764] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.019319] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522879aa-2160-a588-82a8-142040bc63bc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.075709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7609199-f849-428e-b3e3-0ae4a886d0dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.084064] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3680e68-29ac-48fb-a1eb-88bfc6261363 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.116022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bdf3516-f32e-457a-945b-156ce336bab3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.125818] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6213a7e8-66c0-470c-97ac-3b8f3f4ae2af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.137378] env[62619]: DEBUG nova.compute.provider_tree [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 729.223339] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 729.282425] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Successfully created port: 65ca14ee-87b9-4463-9156-09fde4e937be {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 729.524019] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522879aa-2160-a588-82a8-142040bc63bc, 'name': SearchDatastore_Task, 'duration_secs': 0.008734} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 729.524019] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22380dee-4495-4653-b847-6a00604fc51c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.527598] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 729.527598] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f3a289-f065-832e-4c90-35da42cf808c" [ 729.527598] env[62619]: _type = "Task" [ 729.527598] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 729.537515] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f3a289-f065-832e-4c90-35da42cf808c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 729.643151] env[62619]: DEBUG nova.scheduler.client.report [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 729.727506] env[62619]: INFO nova.virt.block_device [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Booting with volume 062e79e5-3250-4a7a-8935-2f1556fd0acd at /dev/sda [ 729.779991] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-bd33441e-5f88-46a6-8da7-da36a804c628 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.793580] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8716c2ee-3bce-4252-8170-48fa1ce78d92 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.820188] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e6f738d0-74a2-4804-ba3b-b076d9bf3729 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.827679] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf84c9e1-70ca-4fde-a4e7-bcbed3b8f3a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.849115] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c0b9ed0-e8ac-4371-afe7-4f5f9f4dcf47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.855028] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4866691f-30a0-4e9a-af49-a269da7b559e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 729.868400] env[62619]: DEBUG nova.virt.block_device [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Updating existing volume attachment record: 3f3b1580-589d-4d32-a108-c1be40251ed7 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 730.038694] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f3a289-f065-832e-4c90-35da42cf808c, 'name': SearchDatastore_Task, 'duration_secs': 0.009052} 
completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 730.038939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 730.039203] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 9735d6d1-eb10-46b4-a273-10b1351033f0/9735d6d1-eb10-46b4-a273-10b1351033f0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 730.039472] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a5767512-96cc-4fc7-b2b1-27506093f452 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.045910] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 730.045910] env[62619]: value = "task-1364859" [ 730.045910] env[62619]: _type = "Task" [ 730.045910] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 730.053259] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.146456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.939s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 730.147306] env[62619]: ERROR nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. 
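The CreateVM_Task, SearchDatastore_Task and CopyVirtualDisk_Task entries above all follow the same wait pattern: the SOAP call returns a task reference, and the API layer polls it until the task reports success or error, which is what produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. Below is a minimal sketch of such a polling loop; get_task_info and the 'running'/'success'/'error' states are hypothetical simplifications for illustration, not the oslo.vmware implementation.

import time

def get_task_info(session, task_ref):
    # Hypothetical helper: look up the current state of a vSphere task,
    # e.g. via a PropertyCollector call. Expected to return a dict such as
    # {'state': 'running', 'progress': 42} or {'state': 'success'}.
    raise NotImplementedError("placeholder for a real vSphere lookup")

def wait_for_task(session, task_ref, poll_interval=0.5):
    """Poll a task reference until it finishes, mirroring the
    'Task: {...} progress is N%' / 'completed successfully' log lines."""
    while True:
        info = get_task_info(session, task_ref)
        if info['state'] == 'success':
            return info                          # "completed successfully"
        if info['state'] == 'error':
            raise RuntimeError(info.get('error', 'task failed'))
        # Still running: report progress and poll again.
        print("progress is %d%%" % info.get('progress', 0))
        time.sleep(poll_interval)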
[ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Traceback (most recent call last): [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self.driver.spawn(context, instance, image_meta, [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] vm_ref = self.build_virtual_machine(instance, [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] vif_infos = vmwarevif.get_vif_info(self._session, [ 730.147306] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] for vif in network_info: [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return self._sync_wrapper(fn, *args, **kwargs) [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self.wait() [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self[:] = self._gt.wait() [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return self._exit_event.wait() [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] result = hub.switch() [ 730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
730.147967] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return self.greenlet.switch() [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] result = function(*args, **kwargs) [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] return func(*args, **kwargs) [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] raise e [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] nwinfo = self.network_api.allocate_for_instance( [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] created_port_ids = self._update_ports_for_instance( [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] with excutils.save_and_reraise_exception(): [ 730.148349] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] self.force_reraise() [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] raise self.value [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] updated_port = self._update_port( [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] _ensure_no_port_binding_failure(port) [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] raise exception.PortBindingFailed(port_id=port['id']) [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] nova.exception.PortBindingFailed: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. [ 730.148746] env[62619]: ERROR nova.compute.manager [instance: d02f8bcb-c754-4308-9c90-260624010cb0] [ 730.149046] env[62619]: DEBUG nova.compute.utils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 730.150168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.703s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 730.154412] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Build of instance d02f8bcb-c754-4308-9c90-260624010cb0 was re-scheduled: Binding failed for port ef42913a-1667-4c32-8ee2-13c61f14c9ef, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 730.155061] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 730.155289] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.155438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.155601] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 730.555980] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364859, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 730.686495] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 730.769266] env[62619]: DEBUG nova.compute.manager [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Received event network-changed-65ca14ee-87b9-4463-9156-09fde4e937be {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 730.769509] env[62619]: DEBUG nova.compute.manager [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Refreshing instance network info cache due to event network-changed-65ca14ee-87b9-4463-9156-09fde4e937be. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 730.770994] env[62619]: DEBUG oslo_concurrency.lockutils [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] Acquiring lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 730.771216] env[62619]: DEBUG oslo_concurrency.lockutils [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] Acquired lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 730.771422] env[62619]: DEBUG nova.network.neutron [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Refreshing network info cache for port 65ca14ee-87b9-4463-9156-09fde4e937be {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 730.841760] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 730.959520] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12210cfb-2f8b-408a-a019-a46de8ee9bbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 730.968261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e931936-0c76-495c-bb9d-a6778011f3e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.003353] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1458ac-0197-4364-9f49-98bff8ba2f63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.012297] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e06c940-671f-4ae8-99a9-0c54aef70e42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.031215] env[62619]: DEBUG nova.compute.provider_tree [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 731.056703] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364859, 
'name': CopyVirtualDisk_Task, 'duration_secs': 0.513479} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.056985] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 9735d6d1-eb10-46b4-a273-10b1351033f0/9735d6d1-eb10-46b4-a273-10b1351033f0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 731.057180] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 731.057421] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-72432f76-01df-4c37-808f-23fd0c1cef79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.064660] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 731.064660] env[62619]: value = "task-1364860" [ 731.064660] env[62619]: _type = "Task" [ 731.064660] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.072706] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364860, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.077181] env[62619]: ERROR nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. 
[ 731.077181] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.077181] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 731.077181] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 731.077181] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.077181] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.077181] env[62619]: ERROR nova.compute.manager raise self.value [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 731.077181] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 731.077181] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.077181] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 731.078775] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.078775] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 731.078775] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. 
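The traceback above shows the failure being raised in _ensure_no_port_binding_failure after Neutron returned the updated port. A minimal sketch of that check follows, under the assumption that Neutron marks an unbindable port with binding:vif_type == 'binding_failed'; the exception class is reduced to a plain Python exception rather than nova.exception.PortBindingFailed.

class PortBindingFailed(Exception):
    """Simplified stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def ensure_no_port_binding_failure(port):
    # Assumption: Neutron flags a port it could not bind on this host
    # with binding:vif_type == 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example with a port Neutron failed to bind (port id taken from the log above):
try:
    ensure_no_port_binding_failure(
        {'id': '65ca14ee-87b9-4463-9156-09fde4e937be',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)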
[ 731.078775] env[62619]: ERROR nova.compute.manager [ 731.078775] env[62619]: Traceback (most recent call last): [ 731.078775] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 731.078775] env[62619]: listener.cb(fileno) [ 731.078775] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 731.078775] env[62619]: result = function(*args, **kwargs) [ 731.078775] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 731.078775] env[62619]: return func(*args, **kwargs) [ 731.078775] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 731.078775] env[62619]: raise e [ 731.078775] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 731.078775] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 731.078775] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 731.078775] env[62619]: created_port_ids = self._update_ports_for_instance( [ 731.078775] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 731.078775] env[62619]: with excutils.save_and_reraise_exception(): [ 731.078775] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 731.078775] env[62619]: self.force_reraise() [ 731.078775] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 731.078775] env[62619]: raise self.value [ 731.078775] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 731.078775] env[62619]: updated_port = self._update_port( [ 731.078775] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 731.078775] env[62619]: _ensure_no_port_binding_failure(port) [ 731.078775] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 731.078775] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 731.079576] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. [ 731.079576] env[62619]: Removing descriptor: 18 [ 731.293085] env[62619]: DEBUG nova.network.neutron [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.344424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-d02f8bcb-c754-4308-9c90-260624010cb0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.344730] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 731.344901] env[62619]: DEBUG nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 731.345097] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 731.365956] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 731.429988] env[62619]: DEBUG nova.network.neutron [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.553527] env[62619]: ERROR nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [req-ff1d80f8-3a86-4923-b730-07cb1cd7194d] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-ff1d80f8-3a86-4923-b730-07cb1cd7194d"}]}: nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. [ 731.571258] env[62619]: DEBUG nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 731.579414] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364860, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069418} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 731.579707] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 731.580500] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca868d45-005b-4507-889c-9ed670976192 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.600123] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Reconfiguring VM instance instance-00000028 to attach disk [datastore1] 9735d6d1-eb10-46b4-a273-10b1351033f0/9735d6d1-eb10-46b4-a273-10b1351033f0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 731.601134] env[62619]: DEBUG nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 731.602053] env[62619]: DEBUG nova.compute.provider_tree [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 731.603183] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3a609888-b539-49ba-833c-cd77f08d2440 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.623457] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 731.623457] env[62619]: value = "task-1364861" [ 731.623457] env[62619]: _type = "Task" [ 731.623457] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 731.628159] env[62619]: DEBUG nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 731.633300] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364861, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 731.648897] env[62619]: DEBUG nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 731.871081] env[62619]: DEBUG nova.network.neutron [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 731.934799] env[62619]: DEBUG oslo_concurrency.lockutils [req-2312407a-2b2e-4cec-859a-329da9c398bc req-029bcc37-d170-4822-bb7c-74af0c7d2d3e service nova] Releasing lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 731.948522] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e730f9-d7b5-43fa-bce3-23a3ea3f24a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.956124] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48ec05d2-5b87-470e-a101-c318f0f8e9a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.987169] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 731.987695] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 731.987901] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 731.988062] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 731.988241] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 731.988386] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 731.988560] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 731.988760] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 731.988911] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 731.989080] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] 
Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 731.989236] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 731.989413] env[62619]: DEBUG nova.virt.hardware [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 731.990370] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cdb3de8-7bb6-4349-9fe2-2b3f0f807156 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 731.993398] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34ab063c-775c-4d94-9c4e-a0fff75e83ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.002581] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b06e1537-bd94-42b4-922d-cad3a16ec604 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.007234] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71500ab6-8404-46b4-ac1e-a692246c6192 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.020816] env[62619]: DEBUG nova.compute.provider_tree [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 732.029762] env[62619]: ERROR nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. 
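Editorial aside: the nova.virt.hardware entries above (flavor m1.nano, 1 vCPU, limits 65536:65536:65536, chosen 1:1:1) amount to enumerating every sockets*cores*threads combination whose product equals the vCPU count and that stays within the limits, then preferring the first sorted candidate. A minimal sketch of that enumeration, for illustration only; the function name and the search strategy are assumptions here, not Nova's actual implementation:

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # No dimension can exceed the vCPU count, so cap the search there
        # instead of walking the full 65536-wide limits.
        topologies = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        topologies.append((sockets, cores, threads))
        return topologies

    # For the 1-vCPU flavor above this yields a single candidate, matching the
    # "Got 1 possible topologies ... (cores=1,sockets=1,threads=1)" lines.
    print(possible_topologies(1))   # [(1, 1, 1)]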
[ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Traceback (most recent call last): [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] yield resources [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self.driver.spawn(context, instance, image_meta, [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] vm_ref = self.build_virtual_machine(instance, [ 732.029762] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] for vif in network_info: [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] return self._sync_wrapper(fn, *args, **kwargs) [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self.wait() [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self[:] = self._gt.wait() [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] return self._exit_event.wait() [ 732.030184] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 732.030184] env[62619]: ERROR 
nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] current.throw(*self._exc) [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] result = function(*args, **kwargs) [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] return func(*args, **kwargs) [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] raise e [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] nwinfo = self.network_api.allocate_for_instance( [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] created_port_ids = self._update_ports_for_instance( [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] with excutils.save_and_reraise_exception(): [ 732.030571] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self.force_reraise() [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] raise self.value [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] updated_port = self._update_port( [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] _ensure_no_port_binding_failure(port) [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] raise exception.PortBindingFailed(port_id=port['id']) [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] nova.exception.PortBindingFailed: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. [ 732.030932] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] [ 732.030932] env[62619]: INFO nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Terminating instance [ 732.031976] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquiring lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 732.032145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquired lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 732.032317] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 732.134451] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364861, 'name': ReconfigVM_Task, 'duration_secs': 0.311421} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 732.136068] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Reconfigured VM instance instance-00000028 to attach disk [datastore1] 9735d6d1-eb10-46b4-a273-10b1351033f0/9735d6d1-eb10-46b4-a273-10b1351033f0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 732.137173] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f0e4676-6d53-4aad-85f7-0a2999d6f624 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 732.144074] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 732.144074] env[62619]: value = "task-1364862" [ 732.144074] env[62619]: _type = "Task" [ 732.144074] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 732.153417] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364862, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.375778] env[62619]: INFO nova.compute.manager [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: d02f8bcb-c754-4308-9c90-260624010cb0] Took 1.03 seconds to deallocate network for instance. [ 732.554691] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 732.577815] env[62619]: DEBUG nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 63 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 732.578153] env[62619]: DEBUG nova.compute.provider_tree [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 63 to 64 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 732.578331] env[62619]: DEBUG nova.compute.provider_tree [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 732.640613] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 732.661851] env[62619]: DEBUG 
oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364862, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 732.791518] env[62619]: DEBUG nova.compute.manager [req-430dbd57-1f48-462e-92c0-f80d2bca33ef req-c3e0a76f-e79d-4e8f-8878-ee649c5b057f service nova] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Received event network-vif-deleted-65ca14ee-87b9-4463-9156-09fde4e937be {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 733.083153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 2.933s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 733.083823] env[62619]: ERROR nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Traceback (most recent call last): [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self.driver.spawn(context, instance, image_meta, [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] vm_ref = self.build_virtual_machine(instance, [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 733.083823] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] for vif in network_info: [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 733.084198] env[62619]: ERROR 
nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return self._sync_wrapper(fn, *args, **kwargs) [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self.wait() [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self[:] = self._gt.wait() [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return self._exit_event.wait() [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] result = hub.switch() [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 733.084198] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return self.greenlet.switch() [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] result = function(*args, **kwargs) [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] return func(*args, **kwargs) [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] raise e [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] nwinfo = self.network_api.allocate_for_instance( [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] created_port_ids = self._update_ports_for_instance( [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 733.084532] env[62619]: ERROR nova.compute.manager 
[instance: 25df5673-9633-40de-8e72-a8620f19a6f9] with excutils.save_and_reraise_exception(): [ 733.084532] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] self.force_reraise() [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] raise self.value [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] updated_port = self._update_port( [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] _ensure_no_port_binding_failure(port) [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] raise exception.PortBindingFailed(port_id=port['id']) [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] nova.exception.PortBindingFailed: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. [ 733.084856] env[62619]: ERROR nova.compute.manager [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] [ 733.085147] env[62619]: DEBUG nova.compute.utils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. 
{{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 733.086699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.283s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.088097] env[62619]: INFO nova.compute.claims [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 733.090637] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Build of instance 25df5673-9633-40de-8e72-a8620f19a6f9 was re-scheduled: Binding failed for port 8e743676-d778-4222-8f5d-da5b23e50ec7, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 733.091111] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 733.091337] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 733.091480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquired lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 733.093391] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 733.149527] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Releasing lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 733.150409] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Start destroying the instance on 
the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 733.150495] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f62e3280-d9f5-47d8-bd93-b9c19c8fbc57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.161516] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364862, 'name': Rename_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.165241] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d704176-ced4-4ae4-9195-afaf06b24eac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.188052] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab could not be found. [ 733.189111] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 733.189111] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0ea39ccf-e48b-46fb-8b0f-268d7d1ccb6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.197356] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c720b2a-3760-4ebf-81ec-7b917fd38ab4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.219614] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab could not be found. [ 733.219851] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 733.220041] env[62619]: INFO nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Took 0.07 seconds to destroy the instance on the hypervisor. 
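Editorial aside: the "Acquiring lock ... / Lock ... acquired ... waited Ns / ... released ... held Ns" entries throughout this run are emitted by oslo.concurrency's lockutils wrapper. A minimal sketch of the two usual ways of taking such a named lock, assuming oslo.concurrency is installed; the function bodies and lock names below are placeholders, not the actual Nova call sites:

    from oslo_concurrency import lockutils

    # Decorator form: serialises every caller that shares the lock name,
    # the pattern seen around the resource tracker's "compute_resources" lock.
    @lockutils.synchronized("compute_resources")
    def claim_resources(instance_uuid):
        return {"instance": instance_uuid, "claimed": True}

    # Context-manager form: the pattern seen around the per-instance
    # "refresh_cache-<uuid>" locks while the network info cache is rebuilt.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock("refresh_cache-%s" % instance_uuid):
            pass  # rebuild and store the instance_info_cache here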
[ 733.220301] env[62619]: DEBUG oslo.service.loopingcall [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 733.220528] env[62619]: DEBUG nova.compute.manager [-] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 733.221610] env[62619]: DEBUG nova.network.neutron [-] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 733.243594] env[62619]: DEBUG nova.network.neutron [-] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.406587] env[62619]: INFO nova.scheduler.client.report [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Deleted allocations for instance d02f8bcb-c754-4308-9c90-260624010cb0 [ 733.454859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquiring lock "1def15dd-e2ad-470e-bff8-9121df881d46" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 733.454859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Lock "1def15dd-e2ad-470e-bff8-9121df881d46" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 733.613327] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 733.660639] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364862, 'name': Rename_Task, 'duration_secs': 1.136461} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 733.660639] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 733.660639] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fe0e1d0-5b40-45d0-b4d6-b182b6d83858 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 733.667638] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 733.667638] env[62619]: value = "task-1364863" [ 733.667638] env[62619]: _type = "Task" [ 733.667638] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 733.671124] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.676722] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364863, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 733.747067] env[62619]: DEBUG nova.network.neutron [-] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 733.917896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-87d43b63-abee-4df0-bf82-eace89e7e352 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "d02f8bcb-c754-4308-9c90-260624010cb0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.025s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 734.173989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Releasing lock "refresh_cache-25df5673-9633-40de-8e72-a8620f19a6f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 734.173989] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 734.174334] env[62619]: DEBUG nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 734.174334] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 734.180768] env[62619]: DEBUG oslo_vmware.api [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364863, 'name': PowerOnVM_Task, 'duration_secs': 0.485513} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 734.181583] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 734.181725] env[62619]: INFO nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Took 6.31 seconds to spawn the instance on the hypervisor. [ 734.181875] env[62619]: DEBUG nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 734.183025] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09d4b712-8cc9-410f-bcfe-74bde6637dab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.196569] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 734.249017] env[62619]: INFO nova.compute.manager [-] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Took 1.03 seconds to deallocate network for instance. 
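Editorial aside: the Rename_Task and PowerOnVM_Task entries above show the usual oslo.vmware pattern: invoke a vCenter method that returns a Task object, then poll it until it finishes (the "progress is N% ... completed successfully" lines). A minimal sketch of that pattern, assuming an already-established oslo.vmware VMwareAPISession in `session` and a VM managed-object reference in `vm_ref`; both are placeholders here:

    def power_on(session, vm_ref):
        # invoke_api issues the SOAP call; vCenter answers with a Task reference
        # (the "Invoking VirtualMachine.PowerOnVM_Task" entries above).
        task = session.invoke_api(session.vim, "PowerOnVM_Task", vm_ref)
        # wait_for_task polls the task until it succeeds or raises, producing
        # the "progress is N% ... completed successfully" entries above.
        return session.wait_for_task(task)

wait_for_task raises on a task error, so a caller such as power_on_instance only ever sees the completed result.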
[ 734.365026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2afd578-e03e-4c50-b73f-68e1b30bebd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.372572] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4acc5cf-7286-47f3-bbf5-7b802418ba0a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.401781] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8bdb909-ff28-41dc-88ef-72f7c06ecfd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.410173] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d4d5131-9849-4831-ba89-cacd2123e926 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.427022] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 734.427022] env[62619]: DEBUG nova.compute.provider_tree [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 734.581998] env[62619]: DEBUG nova.compute.manager [None req-bf44e488-2011-41ae-bf9f-6b65aead33f5 tempest-ServerDiagnosticsV248Test-532249198 tempest-ServerDiagnosticsV248Test-532249198-project-admin] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 734.583137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3030114f-2f11-4c75-9500-b0efed5252e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.592812] env[62619]: INFO nova.compute.manager [None req-bf44e488-2011-41ae-bf9f-6b65aead33f5 tempest-ServerDiagnosticsV248Test-532249198 tempest-ServerDiagnosticsV248Test-532249198-project-admin] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Retrieving diagnostics [ 734.593535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0386fd44-e7ee-4f84-8d3f-0505bcfea32d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 734.702066] env[62619]: DEBUG nova.network.neutron [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 734.703238] env[62619]: INFO nova.compute.manager [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 
tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Took 25.39 seconds to build instance. [ 734.807770] env[62619]: INFO nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Took 0.56 seconds to detach 1 volumes for instance. [ 734.810996] env[62619]: DEBUG nova.compute.claims [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 734.811190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 734.933885] env[62619]: DEBUG nova.scheduler.client.report [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 734.951255] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.205099] env[62619]: INFO nova.compute.manager [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: 25df5673-9633-40de-8e72-a8620f19a6f9] Took 1.03 seconds to deallocate network for instance. 
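Editorial aside: the inventory payload repeated in the report-client entries above is what Placement uses to bound scheduling; the usable amount per resource class is (total - reserved) * allocation_ratio. A quick check against the numbers logged for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 (the helper name is ours, not Nova's, and only the fields needed for the arithmetic are kept):

    def usable(inv):
        # Placement capacity per resource class: (total - reserved) * allocation_ratio.
        return {rc: int((v["total"] - v["reserved"]) * v["allocation_ratio"])
                for rc, v in inv.items()}

    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
    }

    print(usable(inventory))
    # {'VCPU': 192, 'MEMORY_MB': 196078, 'DISK_GB': 400}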
[ 735.207924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c155d3d1-943d-4f1e-9291-29462c4313e5 tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "9735d6d1-eb10-46b4-a273-10b1351033f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.829s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.440447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.354s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 735.440995] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 735.443572] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.954s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.583473] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.583711] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.606612] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "55316613-3507-4386-a7f9-dbcc52f26327" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 735.606840] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "55316613-3507-4386-a7f9-dbcc52f26327" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 735.712233] env[62619]: DEBUG nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 735.951560] env[62619]: DEBUG nova.compute.utils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 735.952662] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 735.952835] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 736.004631] env[62619]: DEBUG nova.policy [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1ab4be10d444359a7a3b245ec9b9ea0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c583f4e7b29743aabd3e96f7c53fa04f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 736.208613] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13768a51-bdfa-49de-bbf9-f634bb49a1e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.221137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8f9a39-68c1-487d-b59d-40497c001987 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.254031] env[62619]: INFO nova.scheduler.client.report [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Deleted allocations for instance 25df5673-9633-40de-8e72-a8620f19a6f9 [ 736.259646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 736.261198] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646b85f7-783f-486f-9b1f-ab19a8cfcf6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.269718] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Successfully created port: 7669df5b-17ac-4bff-91cb-982f857bd13c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 736.272665] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c1c28dd-509a-4a10-8d96-9cdf29099a24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 736.286904] env[62619]: DEBUG nova.compute.provider_tree [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 736.456903] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 736.768179] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bc964228-4942-4151-83f2-98aef156a616 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "25df5673-9633-40de-8e72-a8620f19a6f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.314s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 736.790753] env[62619]: DEBUG nova.scheduler.client.report [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 736.971751] env[62619]: DEBUG nova.compute.manager [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Received event network-changed-7669df5b-17ac-4bff-91cb-982f857bd13c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 736.971948] env[62619]: DEBUG nova.compute.manager [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Refreshing instance network info cache due to event network-changed-7669df5b-17ac-4bff-91cb-982f857bd13c. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 736.972177] env[62619]: DEBUG oslo_concurrency.lockutils [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] Acquiring lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 736.972321] env[62619]: DEBUG oslo_concurrency.lockutils [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] Acquired lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 736.972474] env[62619]: DEBUG nova.network.neutron [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Refreshing network info cache for port 7669df5b-17ac-4bff-91cb-982f857bd13c {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 737.149134] env[62619]: ERROR nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. [ 737.149134] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.149134] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 737.149134] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 737.149134] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.149134] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.149134] env[62619]: ERROR nova.compute.manager raise self.value [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 737.149134] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 737.149134] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.149134] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 737.149636] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.149636] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 737.149636] env[62619]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. [ 737.149636] env[62619]: ERROR nova.compute.manager [ 737.149636] env[62619]: Traceback (most recent call last): [ 737.149636] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 737.149636] env[62619]: listener.cb(fileno) [ 737.149636] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.149636] env[62619]: result = function(*args, **kwargs) [ 737.149636] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 737.149636] env[62619]: return func(*args, **kwargs) [ 737.149636] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.149636] env[62619]: raise e [ 737.149636] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.149636] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 737.149636] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 737.149636] env[62619]: created_port_ids = self._update_ports_for_instance( [ 737.149636] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 737.149636] env[62619]: with excutils.save_and_reraise_exception(): [ 737.149636] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.149636] env[62619]: self.force_reraise() [ 737.149636] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.149636] env[62619]: raise self.value [ 737.149636] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 737.149636] env[62619]: updated_port = self._update_port( [ 737.149636] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.149636] env[62619]: _ensure_no_port_binding_failure(port) [ 737.149636] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.149636] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 737.150473] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. [ 737.150473] env[62619]: Removing descriptor: 18 [ 737.274617] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 737.295494] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.852s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 737.296202] env[62619]: ERROR nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Traceback (most recent call last): [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self.driver.spawn(context, instance, image_meta, [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] vm_ref = self.build_virtual_machine(instance, [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.296202] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] for vif in network_info: [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] return self._sync_wrapper(fn, *args, **kwargs) [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self.wait() [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 737.296538] env[62619]: ERROR 
nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self[:] = self._gt.wait() [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] return self._exit_event.wait() [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] current.throw(*self._exc) [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.296538] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] result = function(*args, **kwargs) [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] return func(*args, **kwargs) [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] raise e [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] nwinfo = self.network_api.allocate_for_instance( [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] created_port_ids = self._update_ports_for_instance( [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] with excutils.save_and_reraise_exception(): [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] self.force_reraise() [ 737.296932] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] raise self.value [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] updated_port = self._update_port( [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] _ensure_no_port_binding_failure(port) [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] raise exception.PortBindingFailed(port_id=port['id']) [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] nova.exception.PortBindingFailed: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. [ 737.297309] env[62619]: ERROR nova.compute.manager [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] [ 737.297309] env[62619]: DEBUG nova.compute.utils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 737.299126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.147s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 737.299688] env[62619]: INFO nova.compute.claims [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 737.302403] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Build of instance d92cd356-0e29-429d-9216-b376e91e0fe8 was re-scheduled: Binding failed for port f1bd94e6-9fe8-40f6-993a-29348d3d4d43, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 737.302564] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 737.302740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquiring lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.302880] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Acquired lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 737.303046] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 737.466691] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 737.491517] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 737.491781] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 737.491934] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 737.492121] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 737.492263] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 737.492404] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 737.492598] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 737.492752] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 737.492912] 
env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 737.493079] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 737.493243] env[62619]: DEBUG nova.virt.hardware [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 737.494367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d2251df-9e84-4bd8-ac88-30ff533f78b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.497390] env[62619]: DEBUG nova.network.neutron [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.503964] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d862e69-93f9-4a25-a105-b76591a2c806 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 737.517787] env[62619]: ERROR nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. 
[ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Traceback (most recent call last): [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] yield resources [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self.driver.spawn(context, instance, image_meta, [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self._vmops.spawn(context, instance, image_meta, injected_files, [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] vm_ref = self.build_virtual_machine(instance, [ 737.517787] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] vif_infos = vmwarevif.get_vif_info(self._session, [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] for vif in network_info: [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] return self._sync_wrapper(fn, *args, **kwargs) [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self.wait() [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self[:] = self._gt.wait() [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] return self._exit_event.wait() [ 737.518216] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 737.518216] env[62619]: ERROR 
nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] current.throw(*self._exc) [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] result = function(*args, **kwargs) [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] return func(*args, **kwargs) [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] raise e [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] nwinfo = self.network_api.allocate_for_instance( [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] created_port_ids = self._update_ports_for_instance( [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] with excutils.save_and_reraise_exception(): [ 737.518651] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self.force_reraise() [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] raise self.value [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] updated_port = self._update_port( [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] _ensure_no_port_binding_failure(port) [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] raise exception.PortBindingFailed(port_id=port['id']) [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. [ 737.519046] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] [ 737.519046] env[62619]: INFO nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Terminating instance [ 737.520166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 737.583904] env[62619]: DEBUG nova.network.neutron [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 737.798313] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 737.822406] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 737.865545] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.086724] env[62619]: DEBUG oslo_concurrency.lockutils [req-b6593fa9-77cd-46e1-ae48-f44370465c81 req-792d174f-fa65-42c5-89b5-bce068d2507a service nova] Releasing lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.087147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 738.087331] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 738.368830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Releasing lock "refresh_cache-d92cd356-0e29-429d-9216-b376e91e0fe8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 738.368830] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 738.369164] env[62619]: DEBUG nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 738.369164] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 738.389225] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.572290] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762a0697-a906-4816-99a5-19310f9dea97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.580437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eef06e0-5dc9-43fe-b275-60dfe0b23bf8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.613269] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1301f99d-9020-47b0-a9a6-fb6ca007205d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.620590] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f3cc3ba-f853-4132-ba8c-bedeccec634d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 738.634598] env[62619]: DEBUG nova.compute.provider_tree [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 738.636355] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 738.741614] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 738.894298] env[62619]: DEBUG nova.network.neutron [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 739.001334] env[62619]: DEBUG nova.compute.manager [req-4eaa5b31-c64a-49c3-8613-880f42da8691 req-33655458-b92f-41c7-a638-85735eac1b44 service nova] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Received event network-vif-deleted-7669df5b-17ac-4bff-91cb-982f857bd13c {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 739.140065] env[62619]: DEBUG nova.scheduler.client.report [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 739.244448] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 739.244888] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 739.245169] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 739.245535] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8a5a106a-cfe5-4f38-9aaf-6ba786b9c4e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.254766] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-678c00d4-5961-42a4-b4cd-b2bad54959a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 739.280014] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 73145811-c355-462e-9a8e-ffccf2efe683 could not be found. [ 739.280300] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 739.280511] env[62619]: INFO nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Took 0.04 seconds to destroy the instance on the hypervisor. [ 739.280821] env[62619]: DEBUG oslo.service.loopingcall [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 739.281093] env[62619]: DEBUG nova.compute.manager [-] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 739.281189] env[62619]: DEBUG nova.network.neutron [-] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 739.297109] env[62619]: DEBUG nova.network.neutron [-] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 739.397411] env[62619]: INFO nova.compute.manager [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] [instance: d92cd356-0e29-429d-9216-b376e91e0fe8] Took 1.03 seconds to deallocate network for instance. 
[ 739.644909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.347s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 739.645428] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 739.648350] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.676s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 739.800245] env[62619]: DEBUG nova.network.neutron [-] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 740.158255] env[62619]: DEBUG nova.compute.utils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 740.161039] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 740.161039] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 740.202119] env[62619]: DEBUG nova.policy [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ce526a1d824fe8b6573fa80adcd53f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33242a5e0a764cf3b8af687fc4302e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 740.303333] env[62619]: INFO nova.compute.manager [-] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Took 1.02 seconds to deallocate network for instance. 
[ 740.310154] env[62619]: DEBUG nova.compute.claims [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 740.310471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 740.434886] env[62619]: INFO nova.scheduler.client.report [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Deleted allocations for instance d92cd356-0e29-429d-9216-b376e91e0fe8 [ 740.482649] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17b64c85-4e67-4bd6-8973-8932fe101cfe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.490659] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Successfully created port: ee20bfdf-1d5a-43cd-8098-8c97aa9ff954 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 740.493409] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19168d8d-3c51-45fc-bf7d-7e519c55832f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.526564] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdbe8883-5319-4bb3-9cad-81ab7b7c1137 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.534681] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c82aded-3054-4a6a-be0d-6f8dd5fd73ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 740.549340] env[62619]: DEBUG nova.compute.provider_tree [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 740.663883] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 740.943294] env[62619]: DEBUG oslo_concurrency.lockutils [None req-037819a8-9c1a-4fb7-ac22-f32b6e6d5953 tempest-ListImageFiltersTestJSON-2037966745 tempest-ListImageFiltersTestJSON-2037966745-project-member] Lock "d92cd356-0e29-429d-9216-b376e91e0fe8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 123.021s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.052857] env[62619]: DEBUG nova.scheduler.client.report [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 741.159858] env[62619]: DEBUG nova.compute.manager [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Received event network-changed-ee20bfdf-1d5a-43cd-8098-8c97aa9ff954 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 741.160060] env[62619]: DEBUG nova.compute.manager [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Refreshing instance network info cache due to event network-changed-ee20bfdf-1d5a-43cd-8098-8c97aa9ff954. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 741.160271] env[62619]: DEBUG oslo_concurrency.lockutils [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] Acquiring lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.160410] env[62619]: DEBUG oslo_concurrency.lockutils [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] Acquired lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.160563] env[62619]: DEBUG nova.network.neutron [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Refreshing network info cache for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 741.335587] env[62619]: ERROR nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. 
[ 741.335587] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.335587] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 741.335587] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 741.335587] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.335587] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.335587] env[62619]: ERROR nova.compute.manager raise self.value [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 741.335587] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 741.335587] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.335587] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 741.336165] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.336165] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 741.336165] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. 
[ 741.336165] env[62619]: ERROR nova.compute.manager [ 741.336165] env[62619]: Traceback (most recent call last): [ 741.336165] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 741.336165] env[62619]: listener.cb(fileno) [ 741.336165] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.336165] env[62619]: result = function(*args, **kwargs) [ 741.336165] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 741.336165] env[62619]: return func(*args, **kwargs) [ 741.336165] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.336165] env[62619]: raise e [ 741.336165] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.336165] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 741.336165] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 741.336165] env[62619]: created_port_ids = self._update_ports_for_instance( [ 741.336165] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 741.336165] env[62619]: with excutils.save_and_reraise_exception(): [ 741.336165] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.336165] env[62619]: self.force_reraise() [ 741.336165] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.336165] env[62619]: raise self.value [ 741.336165] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 741.336165] env[62619]: updated_port = self._update_port( [ 741.336165] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.336165] env[62619]: _ensure_no_port_binding_failure(port) [ 741.336165] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.336165] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 741.337137] env[62619]: nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. [ 741.337137] env[62619]: Removing descriptor: 18 [ 741.446137] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 741.560851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.912s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 741.562830] env[62619]: ERROR nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Traceback (most recent call last): [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self.driver.spawn(context, instance, image_meta, [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] vm_ref = self.build_virtual_machine(instance, [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.562830] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] for vif in network_info: [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] return self._sync_wrapper(fn, *args, **kwargs) [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self.wait() [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 741.563301] env[62619]: ERROR 
nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self[:] = self._gt.wait() [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] return self._exit_event.wait() [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] current.throw(*self._exc) [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.563301] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] result = function(*args, **kwargs) [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] return func(*args, **kwargs) [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] raise e [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] nwinfo = self.network_api.allocate_for_instance( [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] created_port_ids = self._update_ports_for_instance( [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] with excutils.save_and_reraise_exception(): [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] self.force_reraise() [ 741.563658] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] raise self.value [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] updated_port = self._update_port( [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] _ensure_no_port_binding_failure(port) [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] raise exception.PortBindingFailed(port_id=port['id']) [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] nova.exception.PortBindingFailed: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. [ 741.564097] env[62619]: ERROR nova.compute.manager [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] [ 741.564097] env[62619]: DEBUG nova.compute.utils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 741.564415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.163s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 741.565829] env[62619]: INFO nova.compute.claims [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 741.568655] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Build of instance ffae1b5d-83fc-4007-be0f-c6f1e285f824 was re-scheduled: Binding failed for port 646f4a20-7d6f-4b45-bd0a-cc8c10a34513, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 741.569118] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 741.569377] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquiring lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.569525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Acquired lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 741.569710] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 741.670929] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 741.683955] env[62619]: DEBUG nova.network.neutron [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 741.697417] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 741.697647] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 741.697988] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 741.698099] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 741.698292] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 741.698471] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 741.698803] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 741.699015] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 741.699359] env[62619]: DEBUG nova.virt.hardware [None 
req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 741.699406] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 741.699586] env[62619]: DEBUG nova.virt.hardware [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 741.700475] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f51161d-a645-42ce-98aa-4d53ce9f519a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.708533] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6a9c5cb-59fb-42aa-9a6b-7b199c8edee8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 741.723686] env[62619]: ERROR nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. 
[ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Traceback (most recent call last): [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] yield resources [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self.driver.spawn(context, instance, image_meta, [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] vm_ref = self.build_virtual_machine(instance, [ 741.723686] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] vif_infos = vmwarevif.get_vif_info(self._session, [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] for vif in network_info: [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] return self._sync_wrapper(fn, *args, **kwargs) [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self.wait() [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self[:] = self._gt.wait() [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] return self._exit_event.wait() [ 741.724163] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 741.724163] env[62619]: ERROR 
nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] current.throw(*self._exc) [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] result = function(*args, **kwargs) [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] return func(*args, **kwargs) [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] raise e [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] nwinfo = self.network_api.allocate_for_instance( [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] created_port_ids = self._update_ports_for_instance( [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] with excutils.save_and_reraise_exception(): [ 741.724592] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self.force_reraise() [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] raise self.value [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] updated_port = self._update_port( [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] _ensure_no_port_binding_failure(port) [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] raise exception.PortBindingFailed(port_id=port['id']) [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. [ 741.724986] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] [ 741.724986] env[62619]: INFO nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Terminating instance [ 741.726226] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 741.773802] env[62619]: DEBUG nova.network.neutron [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 741.968348] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 742.089863] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.165672] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 742.277174] env[62619]: DEBUG oslo_concurrency.lockutils [req-e930aff4-911e-4de0-9a87-95f55e89028c req-d9df7934-d823-4e1c-af80-1c21951385dc service nova] Releasing lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.277612] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 742.277797] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 742.669921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Releasing lock "refresh_cache-ffae1b5d-83fc-4007-be0f-c6f1e285f824" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 742.670220] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 742.670426] env[62619]: DEBUG nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 742.670594] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 742.684879] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.796084] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 742.811027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aaeca62-1881-46b1-b00c-9f34e8692fe3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.818452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56bf93a1-e6cd-485b-8766-425af9462058 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.851279] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-072e9dc9-655a-4483-889b-f7af4960d41e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.860342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a18af798-0bf6-4864-9e59-b6917bfa2cd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 742.872289] env[62619]: DEBUG nova.compute.provider_tree [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 742.892431] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.188115] env[62619]: DEBUG nova.network.neutron [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 743.202291] env[62619]: DEBUG nova.compute.manager [req-44b28541-53b7-4ef8-8cab-9e72ef178b1c req-04658a26-8094-433e-ac63-81f5e85450b9 service nova] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Received event network-vif-deleted-ee20bfdf-1d5a-43cd-8098-8c97aa9ff954 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 743.375706] env[62619]: DEBUG nova.scheduler.client.report [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 
65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 743.394678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 743.395105] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 743.395294] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 743.395794] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74047618-33be-43b6-bd66-0efd8d4dbb44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.405451] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b99fcfd-dfac-480c-8adf-8dc1a6cc2abc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 743.427024] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c307cc2f-d0c9-49ab-aafa-768a34199f0c could not be found. [ 743.427183] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 743.427365] env[62619]: INFO nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Took 0.03 seconds to destroy the instance on the hypervisor. [ 743.427604] env[62619]: DEBUG oslo.service.loopingcall [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 743.427825] env[62619]: DEBUG nova.compute.manager [-] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 743.427916] env[62619]: DEBUG nova.network.neutron [-] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 743.444605] env[62619]: DEBUG nova.network.neutron [-] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 743.690731] env[62619]: INFO nova.compute.manager [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] [instance: ffae1b5d-83fc-4007-be0f-c6f1e285f824] Took 1.02 seconds to deallocate network for instance. [ 743.880246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.316s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 743.880789] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 743.883254] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.677s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 743.884633] env[62619]: INFO nova.compute.claims [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 743.947322] env[62619]: DEBUG nova.network.neutron [-] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 744.391432] env[62619]: DEBUG nova.compute.utils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 744.392833] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 744.393147] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 744.436560] env[62619]: DEBUG nova.policy [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cdd5547cb3a4b5493ef44880000ef13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd104b68e8640f7a50da22df521f2d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 744.449401] env[62619]: INFO nova.compute.manager [-] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Took 1.02 seconds to deallocate network for instance. [ 744.452171] env[62619]: DEBUG nova.compute.claims [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 744.452408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 744.722092] env[62619]: INFO nova.scheduler.client.report [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Deleted allocations for instance ffae1b5d-83fc-4007-be0f-c6f1e285f824 [ 744.759675] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Successfully created port: 3ca3421b-047d-49b3-bc8b-d41ad48edeba {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 744.897346] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 745.169012] env[62619]: DEBUG nova.compute.manager [None req-ff60ed20-187a-4843-a7d9-ded06155c1b7 tempest-ServerDiagnosticsV248Test-532249198 tempest-ServerDiagnosticsV248Test-532249198-project-admin] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 745.170727] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ed04751-761a-40b1-89fa-63127677d666 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.178398] env[62619]: INFO nova.compute.manager [None req-ff60ed20-187a-4843-a7d9-ded06155c1b7 tempest-ServerDiagnosticsV248Test-532249198 tempest-ServerDiagnosticsV248Test-532249198-project-admin] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Retrieving diagnostics [ 745.179218] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5597d7-49a8-4152-bc6f-bc8edded6e01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.182607] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Successfully created port: b35fb6b0-1238-47b6-aee5-fe5be3feba99 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.208848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95554429-a1d0-47c1-a978-34563fd4d942 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.216066] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c8fd823-ca62-4aee-80f1-61fdcdbd84a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.244583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ab271d2-bfeb-4225-b9cd-b85d8d076924 tempest-ImagesNegativeTestJSON-1134895079 tempest-ImagesNegativeTestJSON-1134895079-project-member] Lock "ffae1b5d-83fc-4007-be0f-c6f1e285f824" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.184s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 745.246762] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95d657f5-125f-4d76-b9d2-6895c3432554 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.254305] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ade173d-cc25-456e-b87c-87b877e023f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.269412] env[62619]: DEBUG nova.compute.provider_tree [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 745.434492] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Successfully created port: 1af8800f-459f-49fe-96dd-842e8b892f5c {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 745.750110] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 745.772513] env[62619]: DEBUG nova.scheduler.client.report [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 745.913720] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 745.941791] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 745.942133] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 745.942353] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 745.942597] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 745.942793] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 745.942986] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 745.943210] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 745.943364] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 745.943568] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 
tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 745.944062] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 745.944062] env[62619]: DEBUG nova.virt.hardware [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 745.944678] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96650174-1f29-4840-ad48-0375a6326de9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 745.954037] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194472bb-7ec5-4015-80f1-15441f544b40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 746.101061] env[62619]: DEBUG nova.compute.manager [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Received event network-changed-3ca3421b-047d-49b3-bc8b-d41ad48edeba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 746.101348] env[62619]: DEBUG nova.compute.manager [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Refreshing instance network info cache due to event network-changed-3ca3421b-047d-49b3-bc8b-d41ad48edeba. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 746.101738] env[62619]: DEBUG oslo_concurrency.lockutils [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] Acquiring lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.101925] env[62619]: DEBUG oslo_concurrency.lockutils [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] Acquired lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.102182] env[62619]: DEBUG nova.network.neutron [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Refreshing network info cache for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 746.278153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.394s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.278326] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 746.282274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.282614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 11.471s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.287897] env[62619]: ERROR nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. 
[ 746.287897] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 746.287897] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 746.287897] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 746.287897] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.287897] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.287897] env[62619]: ERROR nova.compute.manager raise self.value [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 746.287897] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 746.287897] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.287897] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 746.288366] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 746.288366] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 746.288366] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. 
[ 746.288366] env[62619]: ERROR nova.compute.manager [ 746.288366] env[62619]: Traceback (most recent call last): [ 746.288366] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 746.288366] env[62619]: listener.cb(fileno) [ 746.288366] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 746.288366] env[62619]: result = function(*args, **kwargs) [ 746.288366] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 746.288366] env[62619]: return func(*args, **kwargs) [ 746.288366] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 746.288366] env[62619]: raise e [ 746.288366] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 746.288366] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 746.288366] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 746.288366] env[62619]: created_port_ids = self._update_ports_for_instance( [ 746.288366] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 746.288366] env[62619]: with excutils.save_and_reraise_exception(): [ 746.288366] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.288366] env[62619]: self.force_reraise() [ 746.288366] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.288366] env[62619]: raise self.value [ 746.288366] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 746.288366] env[62619]: updated_port = self._update_port( [ 746.288366] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.288366] env[62619]: _ensure_no_port_binding_failure(port) [ 746.288366] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 746.288366] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 746.289251] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. [ 746.289251] env[62619]: Removing descriptor: 18 [ 746.289251] env[62619]: ERROR nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. 
[ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Traceback (most recent call last): [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] yield resources [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self.driver.spawn(context, instance, image_meta, [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 746.289251] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] vm_ref = self.build_virtual_machine(instance, [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] vif_infos = vmwarevif.get_vif_info(self._session, [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] for vif in network_info: [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return self._sync_wrapper(fn, *args, **kwargs) [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self.wait() [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self[:] = self._gt.wait() [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return self._exit_event.wait() [ 746.289649] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 746.290009] env[62619]: ERROR 
nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] result = hub.switch() [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return self.greenlet.switch() [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] result = function(*args, **kwargs) [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return func(*args, **kwargs) [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] raise e [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] nwinfo = self.network_api.allocate_for_instance( [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 746.290009] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] created_port_ids = self._update_ports_for_instance( [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] with excutils.save_and_reraise_exception(): [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self.force_reraise() [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] raise self.value [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] updated_port = self._update_port( [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 746.290380] 
env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] _ensure_no_port_binding_failure(port) [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 746.290380] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] raise exception.PortBindingFailed(port_id=port['id']) [ 746.290820] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. [ 746.290820] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] [ 746.290820] env[62619]: INFO nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Terminating instance [ 746.292272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.324670] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "9735d6d1-eb10-46b4-a273-10b1351033f0" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.325161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "9735d6d1-eb10-46b4-a273-10b1351033f0" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.325161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "9735d6d1-eb10-46b4-a273-10b1351033f0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 746.325311] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "9735d6d1-eb10-46b4-a273-10b1351033f0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 746.325464] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock
"9735d6d1-eb10-46b4-a273-10b1351033f0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 746.328263] env[62619]: INFO nova.compute.manager [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Terminating instance [ 746.335780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "refresh_cache-9735d6d1-eb10-46b4-a273-10b1351033f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 746.335780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquired lock "refresh_cache-9735d6d1-eb10-46b4-a273-10b1351033f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 746.335780] env[62619]: DEBUG nova.network.neutron [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 746.637860] env[62619]: DEBUG nova.network.neutron [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.717717] env[62619]: DEBUG nova.network.neutron [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 746.787672] env[62619]: DEBUG nova.compute.utils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 746.798426] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Allocating IP information in the background.
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 746.798426] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 746.844228] env[62619]: DEBUG nova.policy [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b521f2117b9441eea877a38d47f428ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ed19573ec84647b39c783ef4ebc13a9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 746.856885] env[62619]: DEBUG nova.network.neutron [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 746.959374] env[62619]: DEBUG nova.network.neutron [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.084758] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95bf103c-07c9-42b2-a7bf-9f9c2f736170 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.095911] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb6141c5-8457-4b75-a246-0bd4d8494377 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.130598] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Successfully created port: f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 747.132839] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92e27ce-e654-4a96-9b16-a2e5d3aa5166 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.141470] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28aa3215-e753-454b-9952-73d393e54517 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.154671] env[62619]: DEBUG nova.compute.provider_tree [None req-898fc4d0-a669-4334-86b9-092c30077e08 
tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 747.220253] env[62619]: DEBUG oslo_concurrency.lockutils [req-9299758a-2a65-4033-add2-6c5de0486f22 req-d9399656-694b-46b5-99e0-994876726550 service nova] Releasing lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.220624] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquired lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 747.220809] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 747.295708] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 747.465106] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Releasing lock "refresh_cache-9735d6d1-eb10-46b4-a273-10b1351033f0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 747.465557] env[62619]: DEBUG nova.compute.manager [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 747.465749] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 747.466640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-746d5d24-f4a4-4f28-8fd1-d49fae484ab8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.475745] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 747.475977] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c13f62e-60ac-46f6-a4b2-c92bf03c6534 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 747.483875] env[62619]: DEBUG oslo_vmware.api [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 747.483875] env[62619]: value = "task-1364864" [ 747.483875] env[62619]: _type = "Task" [ 747.483875] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 747.491684] env[62619]: DEBUG oslo_vmware.api [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364864, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 747.657455] env[62619]: DEBUG nova.scheduler.client.report [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 747.761146] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 747.881595] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 747.993181] env[62619]: DEBUG oslo_vmware.api [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364864, 'name': PowerOffVM_Task, 'duration_secs': 0.198821} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 747.993454] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 747.993620] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 747.993881] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-68710bf0-c2c8-4faf-9cb7-5cb8827d9f91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.020231] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 748.020447] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 748.020617] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Deleting the datastore file [datastore1] 9735d6d1-eb10-46b4-a273-10b1351033f0 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 748.020880] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b25b77e0-c0a1-414a-80e1-b4bed26ef983 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.028641] env[62619]: DEBUG oslo_vmware.api [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for the task: (returnval){ [ 748.028641] env[62619]: value = 
"task-1364866" [ 748.028641] env[62619]: _type = "Task" [ 748.028641] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 748.037915] env[62619]: DEBUG oslo_vmware.api [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364866, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 748.088399] env[62619]: ERROR nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. [ 748.088399] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.088399] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.088399] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.088399] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.088399] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.088399] env[62619]: ERROR nova.compute.manager raise self.value [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.088399] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 748.088399] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.088399] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 748.089220] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.089220] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 748.089220] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. 
[ 748.089220] env[62619]: ERROR nova.compute.manager [ 748.089220] env[62619]: Traceback (most recent call last): [ 748.089220] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 748.089220] env[62619]: listener.cb(fileno) [ 748.089220] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.089220] env[62619]: result = function(*args, **kwargs) [ 748.089220] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.089220] env[62619]: return func(*args, **kwargs) [ 748.089220] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.089220] env[62619]: raise e [ 748.089220] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.089220] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 748.089220] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.089220] env[62619]: created_port_ids = self._update_ports_for_instance( [ 748.089220] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.089220] env[62619]: with excutils.save_and_reraise_exception(): [ 748.089220] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.089220] env[62619]: self.force_reraise() [ 748.089220] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.089220] env[62619]: raise self.value [ 748.089220] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.089220] env[62619]: updated_port = self._update_port( [ 748.089220] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.089220] env[62619]: _ensure_no_port_binding_failure(port) [ 748.089220] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.089220] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 748.090502] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. [ 748.090502] env[62619]: Removing descriptor: 18 [ 748.128697] env[62619]: DEBUG nova.compute.manager [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Received event network-vif-deleted-3ca3421b-047d-49b3-bc8b-d41ad48edeba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.128902] env[62619]: DEBUG nova.compute.manager [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Received event network-changed-f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 748.129046] env[62619]: DEBUG nova.compute.manager [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Refreshing instance network info cache due to event network-changed-f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 748.129397] env[62619]: DEBUG oslo_concurrency.lockutils [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] Acquiring lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.129458] env[62619]: DEBUG oslo_concurrency.lockutils [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] Acquired lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.129600] env[62619]: DEBUG nova.network.neutron [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Refreshing network info cache for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 748.162606] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.880s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 748.162881] env[62619]: ERROR nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. 
[ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Traceback (most recent call last): [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self.driver.spawn(context, instance, image_meta, [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] vm_ref = self.build_virtual_machine(instance, [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.162881] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] for vif in network_info: [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] return self._sync_wrapper(fn, *args, **kwargs) [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self.wait() [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self[:] = self._gt.wait() [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] return self._exit_event.wait() [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] current.throw(*self._exc) [ 748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
748.163342] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] result = function(*args, **kwargs) [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] return func(*args, **kwargs) [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] raise e [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] nwinfo = self.network_api.allocate_for_instance( [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] created_port_ids = self._update_ports_for_instance( [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] with excutils.save_and_reraise_exception(): [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] self.force_reraise() [ 748.163749] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] raise self.value [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] updated_port = self._update_port( [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] _ensure_no_port_binding_failure(port) [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] raise exception.PortBindingFailed(port_id=port['id']) [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] nova.exception.PortBindingFailed: Binding failed for 
port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. [ 748.164187] env[62619]: ERROR nova.compute.manager [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] [ 748.164187] env[62619]: DEBUG nova.compute.utils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 748.164842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.214s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 748.166284] env[62619]: INFO nova.compute.claims [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 748.169258] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Build of instance ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab was re-scheduled: Binding failed for port 65ca14ee-87b9-4463-9156-09fde4e937be, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 748.169406] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 748.170061] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquiring lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.170061] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Acquired lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 748.170061] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 748.304878] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 748.330198] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 748.330448] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 748.330601] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 748.330781] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 748.330932] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 748.331113] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 748.331371] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 748.331471] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 748.331676] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 
tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 748.331819] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 748.331981] env[62619]: DEBUG nova.virt.hardware [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 748.332824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf1b9650-c2f4-4202-98bc-c467dc8a040f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.342464] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d254b21-9dda-4f51-a3a5-870e70fab7fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.362074] env[62619]: ERROR nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. 
[ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Traceback (most recent call last): [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] yield resources [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self.driver.spawn(context, instance, image_meta, [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] vm_ref = self.build_virtual_machine(instance, [ 748.362074] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] for vif in network_info: [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] return self._sync_wrapper(fn, *args, **kwargs) [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self.wait() [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self[:] = self._gt.wait() [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] return self._exit_event.wait() [ 748.362566] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 748.362566] env[62619]: ERROR 
nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] current.throw(*self._exc) [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] result = function(*args, **kwargs) [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] return func(*args, **kwargs) [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] raise e [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] nwinfo = self.network_api.allocate_for_instance( [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] created_port_ids = self._update_ports_for_instance( [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] with excutils.save_and_reraise_exception(): [ 748.362984] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self.force_reraise() [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] raise self.value [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] updated_port = self._update_port( [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] _ensure_no_port_binding_failure(port) [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] raise exception.PortBindingFailed(port_id=port['id']) [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] nova.exception.PortBindingFailed: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. [ 748.363446] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] [ 748.363446] env[62619]: INFO nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Terminating instance [ 748.364706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquiring lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 748.384298] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Releasing lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 748.384690] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 748.384880] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 748.385181] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f43307b5-a4fc-4e80-8f37-c015bff0afd9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.393721] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0811c17d-8f93-4f42-b990-8dcb9ceee503 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 748.414542] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c9c375f1-dd7f-43fb-acf6-45e766a8333d could not be found. 
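The traceback above bottoms out in nova/network/neutron.py, where _update_port() calls _ensure_no_port_binding_failure(port) and that helper raises PortBindingFailed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb. A minimal sketch of such a guard follows; the function and exception names come from the traceback, while the 'binding:vif_type' field check is an assumption about how Neutron reports a failed binding, not something this log confirms.

# Hedged sketch of the port-binding guard seen in the traceback above.
# Only the names _ensure_no_port_binding_failure / PortBindingFailed come
# from the log; the 'binding:vif_type' check is an assumption.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    # A port Neutron could not bind carries a "failed" vif type.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# The failed port from the traceback above, reduced to the fields the
# guard looks at.
try:
    _ensure_no_port_binding_failure({
        'id': 'f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED,
    })
except PortBindingFailed as exc:
    print(exc)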
[ 748.414749] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 748.414929] env[62619]: INFO nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Took 0.03 seconds to destroy the instance on the hypervisor. [ 748.415191] env[62619]: DEBUG oslo.service.loopingcall [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.415397] env[62619]: DEBUG nova.compute.manager [-] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 748.415489] env[62619]: DEBUG nova.network.neutron [-] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.522931] env[62619]: DEBUG nova.network.neutron [-] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.537366] env[62619]: DEBUG oslo_vmware.api [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Task: {'id': task-1364866, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.100466} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 748.537611] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 748.537792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 748.537963] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 748.538149] env[62619]: INFO nova.compute.manager [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Took 1.07 seconds to destroy the instance on the hypervisor. 
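The "Waiting for function ... _deallocate_network_with_retries to return." record above is the debug line oslo_service.loopingcall emits while a RetryDecorator-wrapped call runs inside a looping call. A minimal sketch of that retry pattern follows; the retry parameters, the exception type, and the stub network API are illustrative assumptions, not values taken from this log.

from oslo_service import loopingcall


class NetworkDeallocationError(Exception):
    """Illustrative stand-in for the errors worth retrying."""


def _try_deallocate_network(network_api, context, instance_uuid):

    @loopingcall.RetryDecorator(max_retry_count=3,   # illustrative values
                                inc_sleep_time=2,
                                max_sleep_time=30,
                                exceptions=(NetworkDeallocationError,))
    def _deallocate_network_with_retries():
        # Each attempt asks the network API to drop the instance's ports;
        # a listed exception makes RetryDecorator sleep and call us again.
        network_api.deallocate_for_instance(context, instance_uuid)

    _deallocate_network_with_retries()


class _StubNetworkAPI:
    def deallocate_for_instance(self, context, instance_uuid):
        print('deallocating ports for', instance_uuid)


_try_deallocate_network(_StubNetworkAPI(), context=None,
                        instance_uuid='c9c375f1-dd7f-43fb-acf6-45e766a8333d')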
[ 748.538381] env[62619]: DEBUG oslo.service.loopingcall [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 748.538561] env[62619]: DEBUG nova.compute.manager [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 748.538701] env[62619]: DEBUG nova.network.neutron [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 748.553361] env[62619]: DEBUG nova.network.neutron [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.654105] env[62619]: DEBUG nova.network.neutron [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.705529] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 748.883147] env[62619]: DEBUG nova.network.neutron [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 748.915102] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.058765] env[62619]: DEBUG nova.network.neutron [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.389900] env[62619]: DEBUG oslo_concurrency.lockutils [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] Releasing lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.390085] env[62619]: DEBUG nova.compute.manager [req-0dda4080-1952-4e50-85d7-62ec73bf4b35 req-fb95a07e-2e72-4800-8ed2-f44c172082e2 service nova] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Received event network-vif-deleted-f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 749.390586] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquired lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 749.390720] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 749.417236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Releasing lock "refresh_cache-ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 749.417543] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 749.417800] env[62619]: DEBUG nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 749.418052] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 749.435775] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.475447] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e82793fd-44c7-4688-bab8-59ebd7621045 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.483191] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b570d99-8cc5-4c72-8e76-7f2037f6c1f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.513419] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d088a37-2967-46fa-a688-be77ee324b33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.520423] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5043487d-056d-459d-87b1-a11c257fec2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 749.533234] env[62619]: DEBUG nova.compute.provider_tree [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 749.550732] env[62619]: DEBUG nova.network.neutron [-] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.560357] env[62619]: INFO nova.compute.manager [-] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Took 1.02 seconds to deallocate network for instance. [ 749.909949] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 749.938711] env[62619]: DEBUG nova.network.neutron [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 749.996247] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 750.035877] env[62619]: DEBUG nova.scheduler.client.report [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 750.053135] env[62619]: INFO nova.compute.manager [-] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Took 1.64 seconds to deallocate network for instance. [ 750.055887] env[62619]: DEBUG nova.compute.claims [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 750.056223] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.066178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 750.445570] env[62619]: INFO nova.compute.manager [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] [instance: ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab] Took 1.02 seconds to deallocate network for instance. 
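The "Inventory has not changed for provider ..." record above carries the full inventory the resource tracker reports to placement. The usable capacity placement derives from such a record is (total - reserved) * allocation_ratio, with max_unit capping any single allocation; a small worked example using the values from that record:

# Values copied from the inventory record logged above; the capacity
# formula (total - reserved) * allocation_ratio is standard placement
# behaviour, stated here as background rather than read from this log.
inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'max_unit': 16,
                  'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'max_unit': 65530,
                  'allocation_ratio': 1.0},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'max_unit': 149,
                  'allocation_ratio': 1.0},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: schedulable capacity {capacity:g}, "
          f"single-allocation cap {inv['max_unit']}")
# VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400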
[ 750.499212] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Releasing lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 750.499656] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 750.499848] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 750.500152] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-cb7f0c53-dbdb-44f3-b679-f62dd76c7252 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.511644] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fce5f7e5-3a7d-4dbe-94af-b44086a8cfa5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 750.532757] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1 could not be found. [ 750.532969] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 750.533162] env[62619]: INFO nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Took 0.03 seconds to destroy the instance on the hypervisor. [ 750.533398] env[62619]: DEBUG oslo.service.loopingcall [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 750.533605] env[62619]: DEBUG nova.compute.manager [-] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 750.533699] env[62619]: DEBUG nova.network.neutron [-] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 750.540525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.376s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 750.541032] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 750.544502] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.284s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 750.544793] env[62619]: INFO nova.compute.claims [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 750.552981] env[62619]: DEBUG nova.network.neutron [-] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 751.053580] env[62619]: DEBUG nova.compute.utils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 751.056971] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 751.057253] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 751.060195] env[62619]: DEBUG nova.network.neutron [-] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 751.116600] env[62619]: DEBUG nova.policy [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1bbb2cdeea5649399669b97d22a596c7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05500ec9ee6d46de8e04be67b55e1c46', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 751.437129] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Successfully created port: db858b6e-cad6-4091-9388-8d468e6eeaa7 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 751.482504] env[62619]: INFO nova.scheduler.client.report [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Deleted allocations for instance ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab [ 751.560133] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 751.563682] env[62619]: INFO nova.compute.manager [-] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Took 1.03 seconds to deallocate network for instance. 
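The nova.policy record above shows authorize() evaluating network:attach_external_network for a member-scoped request and reporting the check as failed for those credentials. A minimal oslo.policy sketch of that kind of check follows; the admin-only check string and the standalone Enforcer setup are assumptions for illustration, and only the rule name and the credential fields come from the log.

from oslo_config import cfg
from oslo_policy import policy

CONF = cfg.CONF
CONF(args=[])  # standalone setup; inside Nova the loaded config is reused

enforcer = policy.Enforcer(CONF)
# Assumed admin-only check string; Nova registers its own default for this rule.
enforcer.register_default(
    policy.RuleDefault('network:attach_external_network', 'is_admin:True'))

# Credential fields taken from the logged policy check.
creds = {'is_admin': False,
         'roles': ['reader', 'member'],
         'user_id': '1bbb2cdeea5649399669b97d22a596c7',
         'project_id': '05500ec9ee6d46de8e04be67b55e1c46'}

allowed = enforcer.authorize('network:attach_external_network',
                             {}, creds, do_raise=False)
print(allowed)  # False for a member-scoped request, matching the log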
[ 751.566525] env[62619]: DEBUG nova.compute.claims [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 751.566790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.866159] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd7fb95-3294-47e1-b6eb-63491a8bc197 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.874886] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5334fe5d-c31c-4409-886c-3cfe423e7c1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.911588] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e447f6c-7399-4e98-8c2c-4b91b194d7ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.921741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "49fd766a-d798-415e-b5eb-4ad4fe7934c0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.922132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "49fd766a-d798-415e-b5eb-4ad4fe7934c0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.930592] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d49ad27-498d-432c-8b89-b528e1ff3599 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 751.940277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "d7ddbbe2-2bea-4fa1-938c-a344f49f0178" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.940277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "d7ddbbe2-2bea-4fa1-938c-a344f49f0178" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.949364] env[62619]: DEBUG nova.compute.provider_tree [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 751.977249] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "42e05759-742f-4732-97cb-cab2cfb06996" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 751.977485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "42e05759-742f-4732-97cb-cab2cfb06996" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 751.989576] env[62619]: DEBUG oslo_concurrency.lockutils [None req-898fc4d0-a669-4334-86b9-092c30077e08 tempest-ServersTestBootFromVolume-1362569340 tempest-ServersTestBootFromVolume-1362569340-project-member] Lock "ca4b34e4-3c70-45e0-83ba-fe80d7ff96ab" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 129.263s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.169802] env[62619]: DEBUG nova.compute.manager [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Received event network-changed-db858b6e-cad6-4091-9388-8d468e6eeaa7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 752.170106] env[62619]: DEBUG nova.compute.manager [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Refreshing instance network info cache due to event network-changed-db858b6e-cad6-4091-9388-8d468e6eeaa7. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 752.170440] env[62619]: DEBUG oslo_concurrency.lockutils [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] Acquiring lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.170440] env[62619]: DEBUG oslo_concurrency.lockutils [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] Acquired lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 752.171233] env[62619]: DEBUG nova.network.neutron [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Refreshing network info cache for port db858b6e-cad6-4091-9388-8d468e6eeaa7 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 752.356372] env[62619]: ERROR nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. [ 752.356372] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.356372] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 752.356372] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 752.356372] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.356372] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.356372] env[62619]: ERROR nova.compute.manager raise self.value [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 752.356372] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 752.356372] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.356372] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 752.357922] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.357922] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 752.357922] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. [ 752.357922] env[62619]: ERROR nova.compute.manager [ 752.357922] env[62619]: Traceback (most recent call last): [ 752.357922] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 752.357922] env[62619]: listener.cb(fileno) [ 752.357922] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.357922] env[62619]: result = function(*args, **kwargs) [ 752.357922] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 752.357922] env[62619]: return func(*args, **kwargs) [ 752.357922] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.357922] env[62619]: raise e [ 752.357922] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.357922] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 752.357922] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 752.357922] env[62619]: created_port_ids = self._update_ports_for_instance( [ 752.357922] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 752.357922] env[62619]: with excutils.save_and_reraise_exception(): [ 752.357922] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.357922] env[62619]: self.force_reraise() [ 752.357922] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.357922] env[62619]: raise self.value [ 752.357922] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 752.357922] env[62619]: updated_port = self._update_port( [ 752.357922] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.357922] env[62619]: _ensure_no_port_binding_failure(port) [ 752.357922] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 752.357922] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 752.358915] env[62619]: nova.exception.PortBindingFailed: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. 
[ 752.358915] env[62619]: Removing descriptor: 18 [ 752.453753] env[62619]: DEBUG nova.scheduler.client.report [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 752.493206] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 752.569749] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 752.599230] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 752.599354] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 752.599638] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 752.599731] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 752.600335] env[62619]: 
DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 752.600335] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 752.600335] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 752.600502] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 752.600536] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 752.600795] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 752.600904] env[62619]: DEBUG nova.virt.hardware [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 752.602323] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fbacf00-4be5-44a7-919f-1ce1004333d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.610265] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5d00780-23fb-434c-8ca6-6d8451666616 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 752.623820] env[62619]: ERROR nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. 
[ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Traceback (most recent call last): [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] yield resources [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self.driver.spawn(context, instance, image_meta, [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] vm_ref = self.build_virtual_machine(instance, [ 752.623820] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] vif_infos = vmwarevif.get_vif_info(self._session, [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] for vif in network_info: [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] return self._sync_wrapper(fn, *args, **kwargs) [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self.wait() [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self[:] = self._gt.wait() [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] return self._exit_event.wait() [ 752.624258] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 752.624258] env[62619]: ERROR 
nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] current.throw(*self._exc) [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] result = function(*args, **kwargs) [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] return func(*args, **kwargs) [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] raise e [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] nwinfo = self.network_api.allocate_for_instance( [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] created_port_ids = self._update_ports_for_instance( [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] with excutils.save_and_reraise_exception(): [ 752.624696] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self.force_reraise() [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] raise self.value [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] updated_port = self._update_port( [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] _ensure_no_port_binding_failure(port) [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] raise exception.PortBindingFailed(port_id=port['id']) [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] nova.exception.PortBindingFailed: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. [ 752.625166] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] [ 752.625166] env[62619]: INFO nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Terminating instance [ 752.626661] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquiring lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 752.689268] env[62619]: DEBUG nova.network.neutron [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 752.767297] env[62619]: DEBUG nova.network.neutron [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 752.958110] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 752.958701] env[62619]: DEBUG nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 752.961647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.163s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 752.963996] env[62619]: INFO nova.compute.claims [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 753.017267] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 753.271031] env[62619]: DEBUG oslo_concurrency.lockutils [req-b483b99c-8ea8-448d-abc3-ae72f65593da req-ebe80707-78b3-49c7-9ea8-506ac41cb95f service nova] Releasing lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 753.271031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquired lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 753.271031] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 753.467825] env[62619]: DEBUG nova.compute.utils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 753.471028] env[62619]: DEBUG nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Not allocating networking since 'none' was specified. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 753.550955] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 753.551279] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 753.791861] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 753.930246] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 753.971644] env[62619]: DEBUG nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 754.058177] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 754.058329] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 754.200653] env[62619]: DEBUG nova.compute.manager [req-5c6ffb8f-1dba-45e4-a9d7-0b30c3e850aa req-de1c5554-8f29-43b2-8f95-2a9cc4ed3df7 service nova] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Received event network-vif-deleted-db858b6e-cad6-4091-9388-8d468e6eeaa7 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 754.270048] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b71ea04-a51b-4c98-bd15-7c485529ab25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.277749] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95f3d99c-b35c-4973-883b-6c272ad6fe3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.306942] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e73b8f3-2580-41c3-b558-6d94bea7de61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.315087] 
env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db02dbd-0903-45f8-89a0-d7a9e5c67edd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.328599] env[62619]: DEBUG nova.compute.provider_tree [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.436021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Releasing lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 754.436021] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 754.436021] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 754.436021] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-83bb710b-a15a-4892-a7d5-f5d13c5674a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.446898] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32e32966-97a7-430b-9747-a6048c5387ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.467862] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 695dfaa6-8e34-4426-b025-6ce7e4e3174c could not be found. [ 754.468103] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 754.468285] env[62619]: INFO nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Took 0.03 seconds to destroy the instance on the hypervisor. 
[ 754.468528] env[62619]: DEBUG oslo.service.loopingcall [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 754.468772] env[62619]: DEBUG nova.compute.manager [-] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 754.468872] env[62619]: DEBUG nova.network.neutron [-] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 754.491255] env[62619]: DEBUG nova.network.neutron [-] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 754.832574] env[62619]: DEBUG nova.scheduler.client.report [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 754.984902] env[62619]: DEBUG nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 754.998043] env[62619]: DEBUG nova.network.neutron [-] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.009168] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 755.009414] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 755.009565] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 755.009752] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 755.009923] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 755.010124] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 755.010278] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 755.010441] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 
tempest-ServerShowV257Test-1984098576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 755.010599] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 755.010754] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 755.010919] env[62619]: DEBUG nova.virt.hardware [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 755.011791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be6bee9-5172-46f0-a979-c3871f56053d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.020344] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-889ee8f5-dcb3-43dc-9c0a-8250b2c56389 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.033947] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 755.039337] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Creating folder: Project (01538a2087c34a9cb6913108916ea478). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.039608] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-70c12263-168c-4002-8972-eba6aa6e718a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.049092] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Created folder: Project (01538a2087c34a9cb6913108916ea478) in parent group-v290436. [ 755.049300] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Creating folder: Instances. Parent ref: group-v290456. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 755.049476] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ccfa4bd4-08f3-44d5-ab4f-dd83378a16c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.057450] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Created folder: Instances in parent group-v290456. [ 755.057632] env[62619]: DEBUG oslo.service.loopingcall [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.057802] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 755.057982] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-11c4cbc7-4a04-480b-b852-f1ae7a83d2bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.070009] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 755.070198] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.070739] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.070956] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.071180] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.071340] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.071498] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.071630] env[62619]: DEBUG nova.compute.manager [None 
req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 755.071798] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 755.074052] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 755.074052] env[62619]: value = "task-1364870" [ 755.074052] env[62619]: _type = "Task" [ 755.074052] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.081176] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364870, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.341019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 755.341019] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 755.343548] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.033s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 755.501633] env[62619]: INFO nova.compute.manager [-] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Took 1.03 seconds to deallocate network for instance. 
[ 755.504730] env[62619]: DEBUG nova.compute.claims [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 755.504730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.574941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 755.585287] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364870, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.848760] env[62619]: DEBUG nova.compute.utils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 755.850219] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 755.850376] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 755.901375] env[62619]: DEBUG nova.policy [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd738f7981e8d4452bfa83a661fd4acb4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '988b875c01fb4d98a925ceca69089777', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 756.085067] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364870, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.121690] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbc74ab7-e47f-4626-a4fd-3a95deaec38d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.128810] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cd1cd5-5f57-47cf-addc-e3163e62c449 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.157639] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e6f88e6-4968-4b60-88b8-21c9919a22ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.165232] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52dac6d2-2c42-4ddb-8592-2a94c833605d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.178307] env[62619]: DEBUG nova.compute.provider_tree [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.292917] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Successfully created port: f2f68fb9-7251-4077-bd6b-f5696880cac6 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 756.357820] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 756.593719] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364870, 'name': CreateVM_Task, 'duration_secs': 1.263107} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 756.593907] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 756.594652] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.594825] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.595518] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 756.595791] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c4efb1c3-76ac-4b02-aabf-e388a94ed93e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.606016] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 756.606016] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52eefeee-63b6-c797-ba06-1eee27f0f5bf" [ 756.606016] env[62619]: _type = "Task" [ 756.606016] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 756.614878] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52eefeee-63b6-c797-ba06-1eee27f0f5bf, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 756.680785] env[62619]: DEBUG nova.scheduler.client.report [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 756.987657] env[62619]: DEBUG nova.compute.manager [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Received event network-changed-f2f68fb9-7251-4077-bd6b-f5696880cac6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 756.987860] env[62619]: DEBUG nova.compute.manager [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Refreshing instance network info cache due to event network-changed-f2f68fb9-7251-4077-bd6b-f5696880cac6. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 756.988091] env[62619]: DEBUG oslo_concurrency.lockutils [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] Acquiring lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 756.988234] env[62619]: DEBUG oslo_concurrency.lockutils [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] Acquired lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 756.988433] env[62619]: DEBUG nova.network.neutron [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Refreshing network info cache for port f2f68fb9-7251-4077-bd6b-f5696880cac6 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 757.118420] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52eefeee-63b6-c797-ba06-1eee27f0f5bf, 'name': SearchDatastore_Task, 'duration_secs': 0.010384} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.118778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.119038] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 757.119303] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.119454] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.119627] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 757.119923] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2ff4ce67-2da2-4c8a-a425-2e27cd6da974 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.128171] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 757.128345] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 757.129068] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-32158fac-f86a-40ee-8530-1e7c2de78849 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.137255] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 757.137255] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cd7234-aeb5-5918-5599-d5ecc3631c92" [ 757.137255] env[62619]: _type = "Task" [ 757.137255] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.150708] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cd7234-aeb5-5918-5599-d5ecc3631c92, 'name': SearchDatastore_Task, 'duration_secs': 0.008324} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.151622] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-666adcbc-a967-4e8c-bd2b-edb68d132981 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.157613] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 757.157613] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529fcc0b-7fa6-abfa-636d-fcf88501ff6a" [ 757.157613] env[62619]: _type = "Task" [ 757.157613] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.158443] env[62619]: ERROR nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. 
[ 757.158443] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.158443] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.158443] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.158443] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.158443] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.158443] env[62619]: ERROR nova.compute.manager raise self.value [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.158443] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 757.158443] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.158443] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 757.159196] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.159196] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 757.159196] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. 
[ 757.159196] env[62619]: ERROR nova.compute.manager [ 757.159196] env[62619]: Traceback (most recent call last): [ 757.159196] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 757.159196] env[62619]: listener.cb(fileno) [ 757.159196] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.159196] env[62619]: result = function(*args, **kwargs) [ 757.159196] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.159196] env[62619]: return func(*args, **kwargs) [ 757.159196] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.159196] env[62619]: raise e [ 757.159196] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.159196] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 757.159196] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.159196] env[62619]: created_port_ids = self._update_ports_for_instance( [ 757.159196] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.159196] env[62619]: with excutils.save_and_reraise_exception(): [ 757.159196] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.159196] env[62619]: self.force_reraise() [ 757.159196] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.159196] env[62619]: raise self.value [ 757.159196] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.159196] env[62619]: updated_port = self._update_port( [ 757.159196] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.159196] env[62619]: _ensure_no_port_binding_failure(port) [ 757.159196] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.159196] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 757.160480] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. [ 757.160480] env[62619]: Removing descriptor: 18 [ 757.168552] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529fcc0b-7fa6-abfa-636d-fcf88501ff6a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.186806] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.843s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 757.190121] env[62619]: ERROR nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Traceback (most recent call last): [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self.driver.spawn(context, instance, image_meta, [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] vm_ref = self.build_virtual_machine(instance, [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.190121] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] for vif in network_info: [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] return self._sync_wrapper(fn, *args, **kwargs) [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self.wait() [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.190465] 
env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self[:] = self._gt.wait() [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] return self._exit_event.wait() [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] current.throw(*self._exc) [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.190465] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] result = function(*args, **kwargs) [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] return func(*args, **kwargs) [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] raise e [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] nwinfo = self.network_api.allocate_for_instance( [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] created_port_ids = self._update_ports_for_instance( [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] with excutils.save_and_reraise_exception(): [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] self.force_reraise() [ 757.190830] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] raise self.value [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] updated_port = self._update_port( [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] _ensure_no_port_binding_failure(port) [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] raise exception.PortBindingFailed(port_id=port['id']) [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] nova.exception.PortBindingFailed: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. [ 757.191205] env[62619]: ERROR nova.compute.manager [instance: 73145811-c355-462e-9a8e-ffccf2efe683] [ 757.191205] env[62619]: DEBUG nova.compute.utils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 757.191492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.221s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 757.191492] env[62619]: INFO nova.compute.claims [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 757.197351] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Build of instance 73145811-c355-462e-9a8e-ffccf2efe683 was re-scheduled: Binding failed for port 7669df5b-17ac-4bff-91cb-982f857bd13c, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 757.197781] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 757.198008] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.198160] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 757.198316] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 757.375605] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 757.402137] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.402399] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.402561] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.402741] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.402887] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.403095] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.403335] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.403499] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] 
Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.403660] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.403817] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.403989] env[62619]: DEBUG nova.virt.hardware [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.404975] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bdd704c-c288-45b3-8f63-4184a69f9329 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.413167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4fe99b-0904-424d-aa02-f180dbfacec3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.428314] env[62619]: ERROR nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. 
[ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Traceback (most recent call last): [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] yield resources [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self.driver.spawn(context, instance, image_meta, [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] vm_ref = self.build_virtual_machine(instance, [ 757.428314] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] for vif in network_info: [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] return self._sync_wrapper(fn, *args, **kwargs) [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self.wait() [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self[:] = self._gt.wait() [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] return self._exit_event.wait() [ 757.429047] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 757.429047] env[62619]: ERROR 
nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] current.throw(*self._exc) [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] result = function(*args, **kwargs) [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] return func(*args, **kwargs) [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] raise e [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] nwinfo = self.network_api.allocate_for_instance( [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] created_port_ids = self._update_ports_for_instance( [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] with excutils.save_and_reraise_exception(): [ 757.429501] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self.force_reraise() [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] raise self.value [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] updated_port = self._update_port( [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] _ensure_no_port_binding_failure(port) [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] raise exception.PortBindingFailed(port_id=port['id']) [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. [ 757.429960] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] [ 757.429960] env[62619]: INFO nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Terminating instance [ 757.431008] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquiring lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 757.510538] env[62619]: DEBUG nova.network.neutron [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.589020] env[62619]: DEBUG nova.network.neutron [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 757.669230] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529fcc0b-7fa6-abfa-636d-fcf88501ff6a, 'name': SearchDatastore_Task, 'duration_secs': 0.008795} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 757.669503] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 757.669754] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 757.670038] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fbf52b7b-5107-407a-9478-5ee29586b614 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.676640] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 757.676640] env[62619]: value = "task-1364871" [ 757.676640] env[62619]: _type = "Task" [ 757.676640] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 757.686187] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364871, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 757.724180] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 757.801371] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.092292] env[62619]: DEBUG oslo_concurrency.lockutils [req-37db5aa1-e360-482c-9632-db95ac568ee7 req-609b5e4a-5f91-4bc8-8a18-61177b80472f service nova] Releasing lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.092292] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquired lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 758.092292] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.186438] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364871, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.48175} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.186733] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 758.186978] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 758.187243] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-59c41316-0b05-4a38-8e24-eff8706744c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.193491] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 758.193491] env[62619]: value = "task-1364872" [ 758.193491] env[62619]: _type = "Task" [ 758.193491] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.200443] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364872, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.306064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-73145811-c355-462e-9a8e-ffccf2efe683" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 758.306303] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 758.306686] env[62619]: DEBUG nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 758.306686] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 758.324213] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.537932] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a62f7eec-fcd4-4c21-a6a9-7c18489f16e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.550520] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8df7b9a2-6d3a-42cd-a256-b34496ced2f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.580749] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-665e7dc2-5376-423b-a48d-450cff92d41a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.589549] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-112b4fa2-a894-4bec-b278-179de0b03ba9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.605326] env[62619]: DEBUG nova.compute.provider_tree [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 758.627212] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.708389] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364872, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.067917} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.708389] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.708523] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 758.709497] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225013ce-08b9-493c-8804-77b3c09a404d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.728798] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Reconfiguring VM instance instance-0000002f to attach disk [datastore1] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 758.729686] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6b413c5-432d-4870-967e-fdf7ffe70eed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.751196] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 758.751196] env[62619]: value = "task-1364873" [ 758.751196] env[62619]: _type = "Task" [ 758.751196] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.759384] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364873, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.827913] env[62619]: DEBUG nova.network.neutron [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.025056] env[62619]: DEBUG nova.compute.manager [req-23d89ef1-5409-4385-ae70-1ed3488f1af8 req-8d8926c9-9961-4c72-bec6-12dde88416cc service nova] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Received event network-vif-deleted-f2f68fb9-7251-4077-bd6b-f5696880cac6 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 759.109686] env[62619]: DEBUG nova.scheduler.client.report [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 759.212939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Releasing lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 759.212939] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 759.212939] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 759.212939] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6add6eb8-a7e8-4b06-b580-9e4f35bac05e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.221291] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12480985-7a50-4ff5-bf6c-b6d4b460dfed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.243870] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eda91c9e-886e-468e-b9eb-0435c1e94cd3 could not be found. [ 759.244247] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 759.244340] env[62619]: INFO nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Took 0.03 seconds to destroy the instance on the hypervisor. [ 759.244568] env[62619]: DEBUG oslo.service.loopingcall [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 759.244790] env[62619]: DEBUG nova.compute.manager [-] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 759.244887] env[62619]: DEBUG nova.network.neutron [-] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 759.259399] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364873, 'name': ReconfigVM_Task, 'duration_secs': 0.275224} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.259399] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Reconfigured VM instance instance-0000002f to attach disk [datastore1] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 759.259990] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-48c82d98-e8ff-447c-bc96-67a9566fb538 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.262111] env[62619]: DEBUG nova.network.neutron [-] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 759.269040] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 759.269040] env[62619]: value = "task-1364874" [ 759.269040] env[62619]: _type = "Task" [ 759.269040] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.276008] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364874, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.331581] env[62619]: INFO nova.compute.manager [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 73145811-c355-462e-9a8e-ffccf2efe683] Took 1.02 seconds to deallocate network for instance. [ 759.616841] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.424s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 759.616841] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 759.617904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 15.165s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 759.764832] env[62619]: DEBUG nova.network.neutron [-] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 759.780875] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364874, 'name': Rename_Task, 'duration_secs': 0.14235} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 759.781678] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 759.781678] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b8bbade0-d934-41c7-ae4d-11fc2f4cf6d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 759.787759] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 759.787759] env[62619]: value = "task-1364878" [ 759.787759] env[62619]: _type = "Task" [ 759.787759] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 759.795763] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364878, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 760.125196] env[62619]: DEBUG nova.compute.utils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 760.126807] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 760.127014] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 760.170034] env[62619]: DEBUG nova.policy [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'feedab01cdd549a7b6e58e3d3748ee4b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '79dee86a35c04b4c921c57ffda77d654', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 760.270320] env[62619]: INFO nova.compute.manager [-] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Took 1.03 seconds to deallocate network for instance. [ 760.277352] env[62619]: DEBUG nova.compute.claims [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 760.277551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 760.299085] env[62619]: DEBUG oslo_vmware.api [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364878, 'name': PowerOnVM_Task, 'duration_secs': 0.428582} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 760.301217] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 760.301414] env[62619]: INFO nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Took 5.32 seconds to spawn the instance on the hypervisor. 
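Editor's note: the repeated failure mode in this trace is identical for ports 7669df5b-17ac-4bff-91cb-982f857bd13c, f2f68fb9-7251-4077-bd6b-f5696880cac6 and (below) c0e0a74b-ce7a-4e90-8465-6999f72205cd: allocate_for_instance() calls _update_ports_for_instance(), which calls _update_port() and then _ensure_no_port_binding_failure(port), which raises nova.exception.PortBindingFailed. A minimal, self-contained sketch of that guard follows; the 'binding:vif_type' convention is an assumption (only the function name and the raise appear in these tracebacks), and the exception class is a stand-in for nova.exception.PortBindingFailed.

# Sketch of the check at the bottom of each traceback above
# (nova/network/neutron.py:294). The 'binding:vif_type' check is an
# assumption about how Neutron marks a failed binding.
VIF_TYPE_BINDING_FAILED = 'binding_failed'


class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed."""
    def __init__(self, port_id):
        super().__init__(f"Binding failed for port {port_id}, "
                         "please check neutron logs for more information.")


def _ensure_no_port_binding_failure(port):
    """Raise if Neutron reported that it could not bind the port."""
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])


# Example: a port in the state Neutron returned for the builds above.
port = {'id': '7669df5b-17ac-4bff-91cb-982f857bd13c',
        'binding:vif_type': VIF_TYPE_BINDING_FAILED}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
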
[ 760.301582] env[62619]: DEBUG nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 760.302550] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe5b7648-e376-4442-afdc-b85508345a98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.370610] env[62619]: INFO nova.scheduler.client.report [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocations for instance 73145811-c355-462e-9a8e-ffccf2efe683 [ 760.434759] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98d4da3-4e2d-401b-9d41-908b49c0141d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.442867] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-504c7fab-9b63-42e6-b4a2-5d75ee98bc6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.473082] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Successfully created port: c0e0a74b-ce7a-4e90-8465-6999f72205cd {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 760.475250] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e26087b0-ef61-44ef-96c9-3411d81aae9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.483666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82e9c20-c9a3-4c43-bedb-63867cb4595f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 760.496277] env[62619]: DEBUG nova.compute.provider_tree [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 760.629881] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 760.831964] env[62619]: INFO nova.compute.manager [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Took 24.61 seconds to build instance. 
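Editor's note: the successful build of dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe above runs as a chain of vCenter tasks (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, PowerOnVM_Task), each awaited via wait_for_task and polled by _poll_task until it reports "completed successfully". The following is a hypothetical sketch of that poll loop only; the task-state dictionaries and the poll callable are illustrative stand-ins, not oslo.vmware's actual API.

import time


class TaskFailed(Exception):
    """Raised when a polled task ends in the 'error' state."""


def wait_for_task(poll, interval=0.5):
    # Poll a vCenter-style task until it leaves the queued/running states,
    # printing progress each round (as the "progress is N%" records do above),
    # returning the result on success and raising on error.
    while True:
        info = poll()
        if info['state'] == 'success':
            return info.get('result')
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'task failed'))
        print(f"Task {info['name']} progress is {info.get('progress', 0)}%.")
        time.sleep(interval)


# Example with a simulated task that completes on the third poll.
states = iter([
    {'name': 'PowerOnVM_Task', 'state': 'running', 'progress': 0},
    {'name': 'PowerOnVM_Task', 'state': 'running', 'progress': 50},
    {'name': 'PowerOnVM_Task', 'state': 'success', 'result': 'poweredOn'},
])
print(wait_for_task(lambda: next(states), interval=0.01))
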
[ 760.885170] env[62619]: DEBUG oslo_concurrency.lockutils [None req-02376305-c722-465f-b920-884da91cf11c tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "73145811-c355-462e-9a8e-ffccf2efe683" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 137.438s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 760.999730] env[62619]: DEBUG nova.scheduler.client.report [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 761.335525] env[62619]: DEBUG oslo_concurrency.lockutils [None req-fc00afe8-7a6a-49ff-8fd6-885b9e4f92a8 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 101.529s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.342507] env[62619]: DEBUG nova.compute.manager [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Received event network-changed-c0e0a74b-ce7a-4e90-8465-6999f72205cd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 761.342902] env[62619]: DEBUG nova.compute.manager [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Refreshing instance network info cache due to event network-changed-c0e0a74b-ce7a-4e90-8465-6999f72205cd. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 761.342951] env[62619]: DEBUG oslo_concurrency.lockutils [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] Acquiring lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.343477] env[62619]: DEBUG oslo_concurrency.lockutils [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] Acquired lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.343477] env[62619]: DEBUG nova.network.neutron [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Refreshing network info cache for port c0e0a74b-ce7a-4e90-8465-6999f72205cd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 761.345447] env[62619]: ERROR nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. [ 761.345447] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.345447] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 761.345447] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 761.345447] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.345447] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.345447] env[62619]: ERROR nova.compute.manager raise self.value [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 761.345447] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 761.345447] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.345447] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 761.346544] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.346544] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 761.346544] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. [ 761.346544] env[62619]: ERROR nova.compute.manager [ 761.346544] env[62619]: Traceback (most recent call last): [ 761.346544] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 761.346544] env[62619]: listener.cb(fileno) [ 761.346544] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.346544] env[62619]: result = function(*args, **kwargs) [ 761.346544] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.346544] env[62619]: return func(*args, **kwargs) [ 761.346544] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.346544] env[62619]: raise e [ 761.346544] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.346544] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 761.346544] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 761.346544] env[62619]: created_port_ids = self._update_ports_for_instance( [ 761.346544] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 761.346544] env[62619]: with excutils.save_and_reraise_exception(): [ 761.346544] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.346544] env[62619]: self.force_reraise() [ 761.346544] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.346544] env[62619]: raise self.value [ 761.346544] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 761.346544] env[62619]: updated_port = self._update_port( [ 761.346544] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.346544] env[62619]: _ensure_no_port_binding_failure(port) [ 761.346544] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.346544] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 761.347470] env[62619]: nova.exception.PortBindingFailed: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. [ 761.347470] env[62619]: Removing descriptor: 18 [ 761.386144] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 761.506025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.887s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 761.506025] env[62619]: ERROR nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Traceback (most recent call last): [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self.driver.spawn(context, instance, image_meta, [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.506025] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] vm_ref = self.build_virtual_machine(instance, [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] for vif in network_info: [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] return self._sync_wrapper(fn, *args, **kwargs) [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self.wait() [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 761.506419] env[62619]: ERROR 
nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self[:] = self._gt.wait() [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] return self._exit_event.wait() [ 761.506419] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] current.throw(*self._exc) [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] result = function(*args, **kwargs) [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] return func(*args, **kwargs) [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] raise e [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] nwinfo = self.network_api.allocate_for_instance( [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] created_port_ids = self._update_ports_for_instance( [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 761.506807] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] with excutils.save_and_reraise_exception(): [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] self.force_reraise() [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] raise self.value [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] updated_port = self._update_port( [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] _ensure_no_port_binding_failure(port) [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] raise exception.PortBindingFailed(port_id=port['id']) [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] nova.exception.PortBindingFailed: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. [ 761.507322] env[62619]: ERROR nova.compute.manager [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] [ 761.507692] env[62619]: DEBUG nova.compute.utils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 761.508319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.226s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 761.509801] env[62619]: INFO nova.compute.claims [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 761.514845] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Build of instance c307cc2f-d0c9-49ab-aafa-768a34199f0c was re-scheduled: Binding failed for port ee20bfdf-1d5a-43cd-8098-8c97aa9ff954, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 761.514845] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 761.514845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.514845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 761.515397] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 761.640677] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 761.662528] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 761.663641] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 761.663641] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 761.663641] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 761.663641] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 761.663641] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 761.663932] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 761.664145] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 761.664359] env[62619]: DEBUG 
nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 761.664561] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 761.664774] env[62619]: DEBUG nova.virt.hardware [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 761.665948] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd9eb14e-19ba-431a-8e7a-6bc45979f636 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.676171] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef3222a9-f4e6-4ac9-bc2c-c5f0e3830168 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.690244] env[62619]: INFO nova.compute.manager [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Rebuilding instance [ 761.692995] env[62619]: ERROR nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. 
[ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Traceback (most recent call last): [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] yield resources [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self.driver.spawn(context, instance, image_meta, [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self._vmops.spawn(context, instance, image_meta, injected_files, [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] vm_ref = self.build_virtual_machine(instance, [ 761.692995] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] vif_infos = vmwarevif.get_vif_info(self._session, [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] for vif in network_info: [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] return self._sync_wrapper(fn, *args, **kwargs) [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self.wait() [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self[:] = self._gt.wait() [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] return self._exit_event.wait() [ 761.693481] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 761.693481] env[62619]: ERROR 
nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] current.throw(*self._exc) [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] result = function(*args, **kwargs) [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] return func(*args, **kwargs) [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] raise e [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] nwinfo = self.network_api.allocate_for_instance( [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] created_port_ids = self._update_ports_for_instance( [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] with excutils.save_and_reraise_exception(): [ 761.693944] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self.force_reraise() [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] raise self.value [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] updated_port = self._update_port( [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] _ensure_no_port_binding_failure(port) [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] raise exception.PortBindingFailed(port_id=port['id']) [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] nova.exception.PortBindingFailed: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. [ 761.694480] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] [ 761.694480] env[62619]: INFO nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Terminating instance [ 761.696874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquiring lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 761.731928] env[62619]: DEBUG nova.compute.manager [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 761.731928] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3627021b-b1e5-4639-af18-2af423a0b747 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 761.837492] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 761.863898] env[62619]: DEBUG nova.network.neutron [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 761.909754] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.959341] env[62619]: DEBUG nova.network.neutron [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 761.981325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "30045f41-3396-47cb-833d-b5b434c3671b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 761.981865] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "30045f41-3396-47cb-833d-b5b434c3671b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 762.031064] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.107200] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 762.242724] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 762.243115] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7638abdf-b7b7-4f90-a678-30cc6aa93398 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.250127] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 762.250127] env[62619]: value = "task-1364879" [ 762.250127] env[62619]: _type = "Task" [ 762.250127] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.259876] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364879, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.363302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 762.461861] env[62619]: DEBUG oslo_concurrency.lockutils [req-3ddf9990-4b63-4567-8aa6-a528867a82eb req-31c7b9a4-1993-41b4-9269-d267673ded1c service nova] Releasing lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.462341] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquired lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 762.462533] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 762.610442] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-c307cc2f-d0c9-49ab-aafa-768a34199f0c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 762.610679] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 762.610861] env[62619]: DEBUG nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 762.611038] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 762.630427] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 762.764181] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364879, 'name': PowerOffVM_Task, 'duration_secs': 0.132561} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 762.764181] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 762.764518] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 762.765179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37fc7577-1b27-4e80-b4f9-4eb1de0d16b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.775313] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 762.775575] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c9853220-a98a-4463-8b4f-54fce8ea5757 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.805162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-313ca5d5-f1ba-402e-b010-cc117850a90b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.815132] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9e3bc4af-47fc-4945-806a-999047cd0828 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.844757] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f21aca1a-afe9-468c-bece-a1a2febf1838 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.852976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ca06cac-eaee-4174-94a7-255d6262044f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.868579] env[62619]: DEBUG nova.compute.provider_tree [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 762.871116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 762.871116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 762.871239] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Deleting the datastore file [datastore1] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 762.871781] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31111843-72fb-4730-b769-df48367b1e02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 762.879260] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 762.879260] env[62619]: value = "task-1364882" [ 762.879260] env[62619]: _type = "Task" [ 762.879260] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 762.889242] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364882, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 762.981906] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.064645] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.133107] env[62619]: DEBUG nova.network.neutron [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 763.368933] env[62619]: DEBUG nova.compute.manager [req-bb8e1549-4577-4712-8d24-49010982a8e4 req-4e426ea6-4a22-431c-925b-778a5a51cca2 service nova] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Received event network-vif-deleted-c0e0a74b-ce7a-4e90-8465-6999f72205cd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 763.372352] env[62619]: DEBUG nova.scheduler.client.report [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 763.391851] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364882, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.101503} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 763.392705] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 763.392898] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 763.393997] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.566104] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Releasing lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 763.567273] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 763.567369] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 763.567691] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-13db0bc5-5975-459c-b826-dba10c22af87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.578772] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50f8446c-89ca-486b-ab9b-d76f63e12eec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 763.601543] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance eedbd5eb-e431-477e-a817-acb8f54fa511 could not be found. 
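The PortBindingFailed tracebacks earlier in this section (for ports c0e0a74b-ce7a-4e90-8465-6999f72205cd and ee20bfdf-1d5a-43cd-8098-8c97aa9ff954) all bottom out at nova/network/neutron.py line 294 in _ensure_no_port_binding_failure. A minimal, self-contained sketch of that check follows; only the raise is visible in the log, and the binding:vif_type comparison and the local exception class are assumptions added for illustration, not Nova's actual source:

    # Sketch reconstructed from the traceback above. Only the raise appears in
    # the log; the binding:vif_type check and this local exception class are
    # illustrative assumptions.
    class PortBindingFailed(Exception):
        def __init__(self, port_id):
            super().__init__(
                "Binding failed for port %s, please check neutron logs "
                "for more information." % port_id)

    VIF_TYPE_BINDING_FAILED = 'binding_failed'

    def _ensure_no_port_binding_failure(port):
        # Neutron accepted the port create/update but could not bind it to a
        # host; the compute manager then aborts the claim and re-schedules the
        # build, as seen for the instances above.
        if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
            raise PortBindingFailed(port_id=port['id'])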
[ 763.601773] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 763.601951] env[62619]: INFO nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Took 0.03 seconds to destroy the instance on the hypervisor. [ 763.602218] env[62619]: DEBUG oslo.service.loopingcall [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 763.602436] env[62619]: DEBUG nova.compute.manager [-] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 763.602528] env[62619]: DEBUG nova.network.neutron [-] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 763.617665] env[62619]: DEBUG nova.network.neutron [-] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 763.635600] env[62619]: INFO nova.compute.manager [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: c307cc2f-d0c9-49ab-aafa-768a34199f0c] Took 1.02 seconds to deallocate network for instance. [ 763.876692] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.368s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 763.877298] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 763.879916] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.824s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 764.120250] env[62619]: DEBUG nova.network.neutron [-] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 764.384467] env[62619]: DEBUG nova.compute.utils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 764.389140] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 764.389271] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 764.427298] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 764.427556] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 764.427773] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 764.427876] env[62619]: DEBUG nova.virt.hardware [None 
req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 764.428030] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 764.428176] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 764.428379] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 764.428530] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 764.428687] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 764.428876] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 764.429090] env[62619]: DEBUG nova.virt.hardware [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 764.430388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-619c8228-1bff-4b2f-8d17-0c8f1de2afab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.433524] env[62619]: DEBUG nova.policy [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2de58eb8a07a4cd4a22c4b7dbd53a526', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '8f56ffd7008c4df68c875a8fe2591b9b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': 
None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 764.443697] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-080339b6-2389-4dde-abf8-82c3f6f7cced {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.459607] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 764.465036] env[62619]: DEBUG oslo.service.loopingcall [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 764.467539] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 764.468368] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f5b190ce-68b8-46b9-8c55-345b70e5198a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.490258] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 764.490258] env[62619]: value = "task-1364884" [ 764.490258] env[62619]: _type = "Task" [ 764.490258] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 764.504200] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364884, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 764.624395] env[62619]: INFO nova.compute.manager [-] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Took 1.02 seconds to deallocate network for instance. 
[ 764.627280] env[62619]: DEBUG nova.compute.claims [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 764.627280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 764.675683] env[62619]: INFO nova.scheduler.client.report [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocations for instance c307cc2f-d0c9-49ab-aafa-768a34199f0c [ 764.721042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16741e13-40e7-4667-8833-4c174808c784 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.733566] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2592e5fe-5071-4880-8755-667a9b3635db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.762477] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Successfully created port: f20a72bb-fd85-4120-b420-d0c516473eed {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 764.764946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce5c1977-536c-4cd1-b02a-7ecc59c7ce23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.773411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f7dac05-10a7-40cc-ba99-45a5a54ac739 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 764.787797] env[62619]: DEBUG nova.compute.provider_tree [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 764.889622] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 765.001502] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364884, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.191413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-96c4eb7e-c573-4b8b-afce-5cf741ea9109 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "c307cc2f-d0c9-49ab-aafa-768a34199f0c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.382s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.290992] env[62619]: DEBUG nova.scheduler.client.report [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 765.421247] env[62619]: DEBUG nova.compute.manager [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Received event network-changed-f20a72bb-fd85-4120-b420-d0c516473eed {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 765.421516] env[62619]: DEBUG nova.compute.manager [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Refreshing instance network info cache due to event network-changed-f20a72bb-fd85-4120-b420-d0c516473eed. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 765.421756] env[62619]: DEBUG oslo_concurrency.lockutils [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] Acquiring lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.421906] env[62619]: DEBUG oslo_concurrency.lockutils [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] Acquired lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.422157] env[62619]: DEBUG nova.network.neutron [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Refreshing network info cache for port f20a72bb-fd85-4120-b420-d0c516473eed {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 765.507433] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364884, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 765.605622] env[62619]: ERROR nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. [ 765.605622] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.605622] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.605622] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.605622] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.605622] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.605622] env[62619]: ERROR nova.compute.manager raise self.value [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.605622] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 765.605622] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.605622] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 765.606176] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.606176] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 765.606176] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. 
[ 765.606176] env[62619]: ERROR nova.compute.manager [ 765.606176] env[62619]: Traceback (most recent call last): [ 765.606176] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 765.606176] env[62619]: listener.cb(fileno) [ 765.606176] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.606176] env[62619]: result = function(*args, **kwargs) [ 765.606176] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.606176] env[62619]: return func(*args, **kwargs) [ 765.606176] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.606176] env[62619]: raise e [ 765.606176] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.606176] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 765.606176] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.606176] env[62619]: created_port_ids = self._update_ports_for_instance( [ 765.606176] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.606176] env[62619]: with excutils.save_and_reraise_exception(): [ 765.606176] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.606176] env[62619]: self.force_reraise() [ 765.606176] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.606176] env[62619]: raise self.value [ 765.606176] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.606176] env[62619]: updated_port = self._update_port( [ 765.606176] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.606176] env[62619]: _ensure_no_port_binding_failure(port) [ 765.606176] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.606176] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 765.607324] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. [ 765.607324] env[62619]: Removing descriptor: 18 [ 765.694111] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 765.796519] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.917s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 765.797185] env[62619]: ERROR nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Traceback (most recent call last): [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self.driver.spawn(context, instance, image_meta, [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] vm_ref = self.build_virtual_machine(instance, [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.797185] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] for vif in network_info: [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return self._sync_wrapper(fn, *args, **kwargs) [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self.wait() [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 765.797611] env[62619]: ERROR nova.compute.manager 
[instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self[:] = self._gt.wait() [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return self._exit_event.wait() [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] result = hub.switch() [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 765.797611] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return self.greenlet.switch() [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] result = function(*args, **kwargs) [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] return func(*args, **kwargs) [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] raise e [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] nwinfo = self.network_api.allocate_for_instance( [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] created_port_ids = self._update_ports_for_instance( [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] with excutils.save_and_reraise_exception(): [ 765.798045] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] self.force_reraise() [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise 
[ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] raise self.value [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] updated_port = self._update_port( [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] _ensure_no_port_binding_failure(port) [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] raise exception.PortBindingFailed(port_id=port['id']) [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] nova.exception.PortBindingFailed: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. [ 765.798560] env[62619]: ERROR nova.compute.manager [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] [ 765.799054] env[62619]: DEBUG nova.compute.utils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 765.799140] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.733s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 765.799355] env[62619]: DEBUG nova.objects.instance [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lazy-loading 'resources' on Instance uuid 9735d6d1-eb10-46b4-a273-10b1351033f0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 765.801212] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Build of instance c9c375f1-dd7f-43fb-acf6-45e766a8333d was re-scheduled: Binding failed for port 3ca3421b-047d-49b3-bc8b-d41ad48edeba, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 765.802712] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 765.802712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 765.802712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquired lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 765.802712] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 765.899417] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 765.928408] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 765.928720] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 765.928923] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 765.929130] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 765.929274] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 765.929412] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 765.929612] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 765.929766] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 765.929931] 
env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 765.930096] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 765.930262] env[62619]: DEBUG nova.virt.hardware [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 765.932824] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e74228ab-35ee-4e3e-a755-ed755b3309f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.942779] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdeb2b5d-d122-4332-8c3e-d1377fb32843 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 765.949341] env[62619]: DEBUG nova.network.neutron [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 765.959982] env[62619]: ERROR nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. 
[ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Traceback (most recent call last): [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] yield resources [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self.driver.spawn(context, instance, image_meta, [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self._vmops.spawn(context, instance, image_meta, injected_files, [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] vm_ref = self.build_virtual_machine(instance, [ 765.959982] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] vif_infos = vmwarevif.get_vif_info(self._session, [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] for vif in network_info: [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] return self._sync_wrapper(fn, *args, **kwargs) [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self.wait() [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self[:] = self._gt.wait() [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] return self._exit_event.wait() [ 765.960391] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 765.960391] env[62619]: ERROR 
nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] current.throw(*self._exc) [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] result = function(*args, **kwargs) [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] return func(*args, **kwargs) [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] raise e [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] nwinfo = self.network_api.allocate_for_instance( [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] created_port_ids = self._update_ports_for_instance( [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] with excutils.save_and_reraise_exception(): [ 765.960772] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self.force_reraise() [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] raise self.value [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] updated_port = self._update_port( [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] _ensure_no_port_binding_failure(port) [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] raise exception.PortBindingFailed(port_id=port['id']) [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. [ 765.961165] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] [ 765.961165] env[62619]: INFO nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Terminating instance [ 765.962235] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.006172] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364884, 'name': CreateVM_Task, 'duration_secs': 1.261327} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.006172] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 766.006172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.006172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.006172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 766.006459] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4236b2f4-94bf-4cc8-ae24-cf7b8d2943ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.011357] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 766.011357] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52070d55-b00d-375b-cca4-05e331a02b47" [ 766.011357] env[62619]: _type = "Task" [ 766.011357] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.019225] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52070d55-b00d-375b-cca4-05e331a02b47, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.032677] env[62619]: DEBUG nova.network.neutron [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.218059] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 766.322257] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 766.426046] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 766.525080] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52070d55-b00d-375b-cca4-05e331a02b47, 'name': SearchDatastore_Task, 'duration_secs': 0.011491} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.525080] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.525080] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 766.525337] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 766.525337] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.525753] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 766.528163] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-54d4378f-4fea-4428-9392-7477d4bb36eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.535025] env[62619]: DEBUG oslo_concurrency.lockutils [req-ecb36ffe-0022-44e0-8d80-f6fd221214ab req-bca84e05-8add-4d10-871f-fe26541b0a9c service nova] Releasing lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.535162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquired lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 766.535365] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 766.540370] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a 
tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 766.540552] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 766.543931] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-584c25d8-7402-4b83-b532-c3ef71196e17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.551564] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 766.551564] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52484b11-9b80-5758-af9a-effd745a2354" [ 766.551564] env[62619]: _type = "Task" [ 766.551564] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.565549] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52484b11-9b80-5758-af9a-effd745a2354, 'name': SearchDatastore_Task, 'duration_secs': 0.012163} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 766.566365] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac44d214-6d3c-4ab5-b49a-3536abea92ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.575378] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 766.575378] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ccd9e8-6eb5-d305-fdaa-4ba4c3f3b61c" [ 766.575378] env[62619]: _type = "Task" [ 766.575378] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 766.586139] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ccd9e8-6eb5-d305-fdaa-4ba4c3f3b61c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 766.609124] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-241a0125-e7f0-43ff-b1c0-3485ba358908 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.617187] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84dbccb8-9a33-4390-924d-9d60ae6e5d2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.649695] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-170ffbbe-557b-4b70-9812-9aa6273d9bc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.658160] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-194bca82-3786-4ebf-b6b4-0b5d792a5a2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 766.671669] env[62619]: DEBUG nova.compute.provider_tree [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 766.931452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Releasing lock "refresh_cache-c9c375f1-dd7f-43fb-acf6-45e766a8333d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 766.931452] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 766.931452] env[62619]: DEBUG nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 766.931452] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 766.947423] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.053215] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.085994] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ccd9e8-6eb5-d305-fdaa-4ba4c3f3b61c, 'name': SearchDatastore_Task, 'duration_secs': 0.012608} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 767.086308] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.086570] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 767.086906] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-47b852d3-6076-4e77-8f8c-536c9990fb3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.097682] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 767.097682] env[62619]: value = "task-1364886" [ 767.097682] env[62619]: _type = "Task" [ 767.097682] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 767.105509] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364886, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.124392] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.175315] env[62619]: DEBUG nova.scheduler.client.report [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 767.446386] env[62619]: DEBUG nova.compute.manager [req-eeb97996-4e88-457b-9746-074f8f597116 req-77706f7e-cec6-4743-ae56-215a3beb0436 service nova] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Received event network-vif-deleted-f20a72bb-fd85-4120-b420-d0c516473eed {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 767.450517] env[62619]: DEBUG nova.network.neutron [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 767.503252] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "9fd66533-39ff-401d-81ef-f37eaceb3103" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 767.503504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.610314] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364886, 'name': CopyVirtualDisk_Task} progress is 77%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 767.627169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Releasing lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 767.627613] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 767.627826] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 767.628146] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5290faff-3ad8-4c3a-b681-2ae0e9803554 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.637564] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05ebbd92-b44f-44fd-8043-456d24f5c5bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 767.666274] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5ffd77a0-df9a-461c-837e-05b4ff66ea52 could not be found. [ 767.666626] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 767.666887] env[62619]: INFO nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Took 0.04 seconds to destroy the instance on the hypervisor. [ 767.667273] env[62619]: DEBUG oslo.service.loopingcall [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 767.667589] env[62619]: DEBUG nova.compute.manager [-] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 767.667711] env[62619]: DEBUG nova.network.neutron [-] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 767.680035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.881s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 767.682861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.116s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 767.687401] env[62619]: DEBUG nova.network.neutron [-] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 767.700253] env[62619]: INFO nova.scheduler.client.report [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Deleted allocations for instance 9735d6d1-eb10-46b4-a273-10b1351033f0 [ 767.952821] env[62619]: INFO nova.compute.manager [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: c9c375f1-dd7f-43fb-acf6-45e766a8333d] Took 1.02 seconds to deallocate network for instance. [ 768.108807] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364886, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533758} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.109178] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 768.109394] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 768.109643] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-065b8d00-6b5e-4bb8-b7bf-f1cb1659b0ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.120155] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 768.120155] env[62619]: value = "task-1364887" [ 768.120155] env[62619]: _type = "Task" [ 768.120155] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.128956] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364887, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.193028] env[62619]: DEBUG nova.network.neutron [-] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 768.207773] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ea2a5a9-0094-45a0-a39b-7d58d0b9ed4f tempest-ServerDiagnosticsV248Test-1118028056 tempest-ServerDiagnosticsV248Test-1118028056-project-member] Lock "9735d6d1-eb10-46b4-a273-10b1351033f0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.883s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 768.417192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e980110-6150-4d5f-afb4-e0d66072ccf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.425672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfbfe8a2-7c4b-41a0-9059-2657019fe9d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.455975] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a566d1f4-73ce-4901-acee-56215483f3ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.466666] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02290177-67c5-4bb3-ac69-925546f619ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.480209] env[62619]: DEBUG nova.compute.provider_tree [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 768.630466] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364887, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.069277} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 768.630759] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 768.631538] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7a94ef-d60b-4f1f-b651-00c0a324f1b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.652367] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Reconfiguring VM instance instance-0000002f to attach disk [datastore2] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 768.652658] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9fe29983-6939-410a-be87-b82103393a63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 768.671959] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 768.671959] env[62619]: value = "task-1364889" [ 768.671959] env[62619]: _type = "Task" [ 768.671959] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 768.683011] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364889, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 768.694609] env[62619]: INFO nova.compute.manager [-] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Took 1.03 seconds to deallocate network for instance. 
[ 768.698873] env[62619]: DEBUG nova.compute.claims [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 768.699085] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 768.983691] env[62619]: DEBUG nova.scheduler.client.report [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 768.989221] env[62619]: INFO nova.scheduler.client.report [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Deleted allocations for instance c9c375f1-dd7f-43fb-acf6-45e766a8333d [ 769.186528] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364889, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 769.499336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.816s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.499921] env[62619]: ERROR nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. 
[ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Traceback (most recent call last): [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self.driver.spawn(context, instance, image_meta, [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self._vmops.spawn(context, instance, image_meta, injected_files, [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] vm_ref = self.build_virtual_machine(instance, [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] vif_infos = vmwarevif.get_vif_info(self._session, [ 769.499921] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] for vif in network_info: [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] return self._sync_wrapper(fn, *args, **kwargs) [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self.wait() [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self[:] = self._gt.wait() [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] return self._exit_event.wait() [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] current.throw(*self._exc) [ 769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
769.500302] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] result = function(*args, **kwargs) [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] return func(*args, **kwargs) [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] raise e [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] nwinfo = self.network_api.allocate_for_instance( [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] created_port_ids = self._update_ports_for_instance( [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] with excutils.save_and_reraise_exception(): [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] self.force_reraise() [ 769.500749] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] raise self.value [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] updated_port = self._update_port( [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] _ensure_no_port_binding_failure(port) [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] raise exception.PortBindingFailed(port_id=port['id']) [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] nova.exception.PortBindingFailed: Binding failed for 
port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. [ 769.501183] env[62619]: ERROR nova.compute.manager [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] [ 769.501183] env[62619]: DEBUG nova.compute.utils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 769.503750] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Build of instance 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1 was re-scheduled: Binding failed for port f6ba4241-cc69-4f0f-9dd4-ca27d7eec3cb, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 769.503750] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 769.503750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquiring lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 769.503750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Acquired lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 769.504031] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 769.504942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.488s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 769.506647] env[62619]: INFO nova.compute.claims [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 769.509319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c2baa77b-5fcc-4666-b642-016bc11a42b0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock 
"c9c375f1-dd7f-43fb-acf6-45e766a8333d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 128.410s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 769.684718] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364889, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.015403] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 770.040421] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 770.170262] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 770.185506] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364889, 'name': ReconfigVM_Task, 'duration_secs': 1.297381} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.186238] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Reconfigured VM instance instance-0000002f to attach disk [datastore2] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe/dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 770.187785] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-770ed1cc-6ae2-4fd1-bbaf-3543cbb4431b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.196809] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 770.196809] env[62619]: value = "task-1364890" [ 770.196809] env[62619]: _type = "Task" [ 770.196809] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.211074] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364890, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.541944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 770.674782] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Releasing lock "refresh_cache-8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 770.674906] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 770.675082] env[62619]: DEBUG nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 770.675251] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 770.692017] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 770.712562] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364890, 'name': Rename_Task, 'duration_secs': 0.137608} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 770.712873] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 770.713161] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b652d846-fa57-46d0-9809-0066717c5943 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.726317] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 770.726317] env[62619]: value = "task-1364892" [ 770.726317] env[62619]: _type = "Task" [ 770.726317] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 770.737161] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364892, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 770.817079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-972f0134-359a-4d6b-92c3-337bb83e4596 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.825079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84f8daae-29ff-48dc-9dac-37ddae4d88f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.856515] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c94b3187-0b0e-4982-b671-1c8d3d614170 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.865066] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a69e03d4-8329-4d7a-a517-3a839e6b6b42 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 770.879457] env[62619]: DEBUG nova.compute.provider_tree [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 771.194849] env[62619]: DEBUG nova.network.neutron [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 771.238251] env[62619]: DEBUG oslo_vmware.api [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 
tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364892, 'name': PowerOnVM_Task, 'duration_secs': 0.456694} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 771.239084] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 771.239397] env[62619]: DEBUG nova.compute.manager [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 771.240451] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6296321-b37c-409f-a824-e6da92b4c2af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 771.383405] env[62619]: DEBUG nova.scheduler.client.report [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 771.697439] env[62619]: INFO nova.compute.manager [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] [instance: 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1] Took 1.02 seconds to deallocate network for instance. [ 771.759339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 771.888499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.383s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 771.889292] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 771.892239] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.387s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.162374] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "61d68c36-5251-4fad-9d3b-125296ae0861" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.162596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "61d68c36-5251-4fad-9d3b-125296ae0861" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.399985] env[62619]: DEBUG nova.compute.utils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 772.402174] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 772.402386] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 772.470034] env[62619]: DEBUG nova.policy [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '66d44144b6864c30b9e593927a12c756', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '928b8764106043caadbf11db62d3228e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 772.739667] env[62619]: INFO nova.scheduler.client.report [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Deleted allocations for instance 8e7c5e2e-c419-44c9-85b0-c9cc171b87f1 [ 772.778974] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Successfully created port: 6d0bb7c9-6909-4052-a644-3a785374f724 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 772.783996] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57d2459a-3615-4653-a03c-a9a348f45942 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.793949] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.794274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.794560] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 772.794835] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 
tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 772.795074] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 772.797715] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-884822bb-90d4-4626-8c2c-6e305b6a8246 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.802057] env[62619]: INFO nova.compute.manager [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Terminating instance [ 772.803215] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "refresh_cache-dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 772.804466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquired lock "refresh_cache-dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 772.804466] env[62619]: DEBUG nova.network.neutron [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 772.834613] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4dddbda1-7d5a-4328-8c59-a2169b5f1f75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.844045] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4be9d13-930a-4ba7-a7f0-4ea7ac6b232b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 772.859925] env[62619]: DEBUG nova.compute.provider_tree [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 772.905986] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 
tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 773.250318] env[62619]: DEBUG oslo_concurrency.lockutils [None req-71c0a200-3df5-47f6-95f3-0b8e39ac3271 tempest-ServerTagsTestJSON-1344909590 tempest-ServerTagsTestJSON-1344909590-project-member] Lock "8e7c5e2e-c419-44c9-85b0-c9cc171b87f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 122.523s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.324896] env[62619]: DEBUG nova.network.neutron [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 773.364484] env[62619]: DEBUG nova.scheduler.client.report [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 773.465922] env[62619]: DEBUG nova.network.neutron [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 773.550671] env[62619]: DEBUG nova.compute.manager [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Received event network-changed-6d0bb7c9-6909-4052-a644-3a785374f724 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 773.551021] env[62619]: DEBUG nova.compute.manager [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Refreshing instance network info cache due to event network-changed-6d0bb7c9-6909-4052-a644-3a785374f724. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 773.551295] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] Acquiring lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.551449] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] Acquired lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.551600] env[62619]: DEBUG nova.network.neutron [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Refreshing network info cache for port 6d0bb7c9-6909-4052-a644-3a785374f724 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 773.671265] env[62619]: ERROR nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. [ 773.671265] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.671265] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 773.671265] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 773.671265] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.671265] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.671265] env[62619]: ERROR nova.compute.manager raise self.value [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 773.671265] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 773.671265] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.671265] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 773.672126] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.672126] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 773.672126] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding 
failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. [ 773.672126] env[62619]: ERROR nova.compute.manager [ 773.672126] env[62619]: Traceback (most recent call last): [ 773.672126] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 773.672126] env[62619]: listener.cb(fileno) [ 773.672126] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 773.672126] env[62619]: result = function(*args, **kwargs) [ 773.672126] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 773.672126] env[62619]: return func(*args, **kwargs) [ 773.672126] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 773.672126] env[62619]: raise e [ 773.672126] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.672126] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 773.672126] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 773.672126] env[62619]: created_port_ids = self._update_ports_for_instance( [ 773.672126] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 773.672126] env[62619]: with excutils.save_and_reraise_exception(): [ 773.672126] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.672126] env[62619]: self.force_reraise() [ 773.672126] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.672126] env[62619]: raise self.value [ 773.672126] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 773.672126] env[62619]: updated_port = self._update_port( [ 773.672126] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.672126] env[62619]: _ensure_no_port_binding_failure(port) [ 773.672126] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.672126] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 773.673854] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. [ 773.673854] env[62619]: Removing descriptor: 18 [ 773.752389] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 773.878017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.983s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.878017] env[62619]: ERROR nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Traceback (most recent call last): [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self.driver.spawn(context, instance, image_meta, [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 773.878017] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] vm_ref = self.build_virtual_machine(instance, [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] vif_infos = vmwarevif.get_vif_info(self._session, [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] for vif in network_info: [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] return self._sync_wrapper(fn, *args, **kwargs) [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self.wait() [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 773.878389] env[62619]: ERROR 
nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self[:] = self._gt.wait() [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] return self._exit_event.wait() [ 773.878389] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] current.throw(*self._exc) [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] result = function(*args, **kwargs) [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] return func(*args, **kwargs) [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] raise e [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] nwinfo = self.network_api.allocate_for_instance( [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] created_port_ids = self._update_ports_for_instance( [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 773.878791] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] with excutils.save_and_reraise_exception(): [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] self.force_reraise() [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] raise self.value [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in 
_update_ports_for_instance [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] updated_port = self._update_port( [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] _ensure_no_port_binding_failure(port) [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] raise exception.PortBindingFailed(port_id=port['id']) [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] nova.exception.PortBindingFailed: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. [ 773.879346] env[62619]: ERROR nova.compute.manager [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] [ 773.879701] env[62619]: DEBUG nova.compute.utils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 773.879701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 18.304s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.884017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 773.884017] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 773.884017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.603s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 773.884017] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Build of instance 695dfaa6-8e34-4426-b025-6ce7e4e3174c was re-scheduled: Binding failed for port db858b6e-cad6-4091-9388-8d468e6eeaa7, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 773.884595] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 773.884953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquiring lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 773.885219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Acquired lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 773.885499] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 773.887140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c67288-40f1-47bb-8d9d-5070449b1adb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.903951] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b12d4e-edb5-4b72-99e1-611c3c831cd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.925451] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 773.929133] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98a15566-a832-4bc6-b17b-78df678385ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.941081] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1bf866e-b25a-4936-ad00-7000f59aaef5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.980604] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Releasing lock "refresh_cache-dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 773.981167] env[62619]: DEBUG nova.compute.manager [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 773.981469] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 773.981848] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181400MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 773.981971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 773.984829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba44e93-bcd5-413b-a62b-7c6e3d7c731f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.993042] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 773.995374] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Getting desirable topologies for flavor 
Flavor(created_at=2024-10-25T17:01:42Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='f1d517a2-96aa-49f0-8b0f-a7179535cb04',id=38,is_public=True,memory_mb=192,name='tempest-test_resize_flavor_-742630744',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 773.995567] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 773.995717] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 773.995892] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 773.996074] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 773.996214] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 773.996398] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 773.996564] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 773.996744] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 773.996863] env[62619]: DEBUG nova.virt.hardware 
[None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 773.997123] env[62619]: DEBUG nova.virt.hardware [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 773.997390] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6018eafb-28cf-4ab8-8472-0aadf235d2be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 773.999841] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5a9a1b-6f1f-4191-9860-c54ee2f23ae4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.010178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50d7acad-3eea-4ccb-8477-f040d3094943 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.013922] env[62619]: DEBUG oslo_vmware.api [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 774.013922] env[62619]: value = "task-1364894" [ 774.013922] env[62619]: _type = "Task" [ 774.013922] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.025738] env[62619]: ERROR nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. 
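Every PortBindingFailed in this section, including the spawn failure above and the traceback that follows, is raised from _ensure_no_port_binding_failure (nova/network/neutron.py:294) once Neutron returns the port with binding:vif_type set to 'binding_failed'. Below is a minimal, self-contained Python sketch of that check, reconstructed from the traceback rather than quoted from the Nova source; the port dict is a hypothetical payload standing in for what Neutron returned for port 6d0bb7c9-6909-4052-a644-3a785374f724.

# Minimal sketch, not the verbatim Nova code: approximates the check named in
# the tracebacks in this log (nova/network/neutron.py::_ensure_no_port_binding_failure).
VIF_TYPE_BINDING_FAILED = 'binding_failed'  # vif_type Neutron reports for a failed binding

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs for more "
            "information." % port_id)

def _ensure_no_port_binding_failure(port):
    # Neutron leaves binding:vif_type as 'binding_failed' when no mechanism
    # driver could bind the port on the requested host; Nova raises
    # PortBindingFailed instead of continuing with an unusable VIF.
    if port.get('binding:vif_type') == VIF_TYPE_BINDING_FAILED:
        raise PortBindingFailed(port_id=port['id'])

# Hypothetical port payload, shaped like a Neutron response for the port above.
port = {'id': '6d0bb7c9-6909-4052-a644-3a785374f724',
        'binding:vif_type': 'binding_failed'}
try:
    _ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)

Running the sketch prints the same "Binding failed for port ..., please check neutron logs for more information." message that recurs throughout this log; in the real service the exception propagates out of _allocate_network_async, the instance claim is aborted, and the build is re-scheduled, which is the sequence visible in the surrounding records.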
[ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Traceback (most recent call last): [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] yield resources [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self.driver.spawn(context, instance, image_meta, [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] vm_ref = self.build_virtual_machine(instance, [ 774.025738] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] vif_infos = vmwarevif.get_vif_info(self._session, [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] for vif in network_info: [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] return self._sync_wrapper(fn, *args, **kwargs) [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self.wait() [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self[:] = self._gt.wait() [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] return self._exit_event.wait() [ 774.026233] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 774.026233] env[62619]: ERROR 
nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] current.throw(*self._exc) [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] result = function(*args, **kwargs) [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] return func(*args, **kwargs) [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] raise e [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] nwinfo = self.network_api.allocate_for_instance( [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] created_port_ids = self._update_ports_for_instance( [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] with excutils.save_and_reraise_exception(): [ 774.030342] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self.force_reraise() [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] raise self.value [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] updated_port = self._update_port( [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] _ensure_no_port_binding_failure(port) [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] raise exception.PortBindingFailed(port_id=port['id']) [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] nova.exception.PortBindingFailed: Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. [ 774.030887] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] [ 774.030887] env[62619]: INFO nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Terminating instance [ 774.031309] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 774.033942] env[62619]: DEBUG oslo_vmware.api [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364894, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.076953] env[62619]: DEBUG nova.network.neutron [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 774.152127] env[62619]: DEBUG nova.network.neutron [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.280016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 774.487999] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 774.530683] env[62619]: DEBUG oslo_vmware.api [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364894, 'name': PowerOffVM_Task, 'duration_secs': 0.124368} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 774.531784] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 774.531784] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 774.531784] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fe6b03c2-29c3-4aa7-94c9-c0389ec8dd5a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.562591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 774.563850] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 774.563850] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Deleting the datastore file [datastore2] dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 774.563850] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6237515f-c508-4212-9451-9a8e2f259051 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.576678] env[62619]: DEBUG oslo_vmware.api [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for the task: (returnval){ [ 774.576678] env[62619]: value = "task-1364896" [ 774.576678] env[62619]: _type = "Task" [ 774.576678] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 774.590703] env[62619]: DEBUG oslo_vmware.api [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364896, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 774.656865] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 774.660628] env[62619]: DEBUG oslo_concurrency.lockutils [req-9a89f2c1-365d-45dc-b4e1-1077d997220b req-534b8706-00b8-4d02-85ea-6d8983064978 service nova] Releasing lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 774.660628] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquired lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 774.660628] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 774.733714] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57dbccbf-36d5-4a79-97ab-433a6cecbb1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.742424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-222922bb-4763-47df-a1d0-2a13f262ce15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.785017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1eb420a-a684-4495-b6d9-dbb7b3a048fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.794605] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d5264f0-c067-49bb-8f3f-7e982ebb1add {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 774.811836] env[62619]: DEBUG nova.compute.provider_tree [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 775.089982] env[62619]: DEBUG oslo_vmware.api [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Task: {'id': task-1364896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223518} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 775.089982] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 775.089982] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 775.089982] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.089982] env[62619]: INFO nova.compute.manager [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Took 1.11 seconds to destroy the instance on the hypervisor. [ 775.090251] env[62619]: DEBUG oslo.service.loopingcall [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.090251] env[62619]: DEBUG nova.compute.manager [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.090251] env[62619]: DEBUG nova.network.neutron [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.114235] env[62619]: DEBUG nova.network.neutron [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.133819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 775.134111] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.159159] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Releasing lock "refresh_cache-695dfaa6-8e34-4426-b025-6ce7e4e3174c" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.159415] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 775.159596] env[62619]: DEBUG nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.159759] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.180875] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.183124] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.242490] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.315820] env[62619]: DEBUG nova.scheduler.client.report [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 775.578145] env[62619]: DEBUG nova.compute.manager [req-6ad7bcf4-81ff-45aa-b855-364408479c28 req-49ec6a86-97f0-4b44-ba92-692f31420ba4 service nova] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Received event network-vif-deleted-6d0bb7c9-6909-4052-a644-3a785374f724 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 775.617517] env[62619]: DEBUG nova.network.neutron [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.686845] env[62619]: DEBUG nova.network.neutron [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 775.749507] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Releasing lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 775.749507] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 775.749507] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 775.749507] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0bdab91a-c898-4abf-b66d-7eb2b8651579 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.759650] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d2d465e-9ec5-4f89-a841-79c455021f0f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 775.787472] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance a2ed3e4d-40c2-46b0-9892-0e9cce7b330b could not be found. [ 775.788307] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 775.788307] env[62619]: INFO nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 775.789396] env[62619]: DEBUG oslo.service.loopingcall [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 775.789396] env[62619]: DEBUG nova.compute.manager [-] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 775.789396] env[62619]: DEBUG nova.network.neutron [-] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 775.817316] env[62619]: DEBUG nova.network.neutron [-] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 775.826069] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.945s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 775.827208] env[62619]: ERROR nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Traceback (most recent call last): [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self.driver.spawn(context, instance, image_meta, [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] vm_ref = self.build_virtual_machine(instance, [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] vif_infos = vmwarevif.get_vif_info(self._session, [ 775.827208] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] for vif in network_info: [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] return self._sync_wrapper(fn, *args, **kwargs) [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self.wait() [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/model.py", line 635, in wait 
[ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self[:] = self._gt.wait() [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] return self._exit_event.wait() [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] current.throw(*self._exc) [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 775.827891] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] result = function(*args, **kwargs) [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] return func(*args, **kwargs) [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] raise e [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] nwinfo = self.network_api.allocate_for_instance( [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] created_port_ids = self._update_ports_for_instance( [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] with excutils.save_and_reraise_exception(): [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] self.force_reraise() [ 775.828554] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] raise self.value [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 
1340, in _update_ports_for_instance [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] updated_port = self._update_port( [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] _ensure_no_port_binding_failure(port) [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] raise exception.PortBindingFailed(port_id=port['id']) [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] nova.exception.PortBindingFailed: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. [ 775.829266] env[62619]: ERROR nova.compute.manager [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] [ 775.829266] env[62619]: DEBUG nova.compute.utils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 775.833032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.920s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 775.834932] env[62619]: INFO nova.compute.claims [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 775.841954] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Build of instance eda91c9e-886e-468e-b9eb-0435c1e94cd3 was re-scheduled: Binding failed for port f2f68fb9-7251-4077-bd6b-f5696880cac6, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 775.842501] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 775.842732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquiring lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 775.842870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Acquired lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 775.843041] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 776.118683] env[62619]: INFO nova.compute.manager [-] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Took 1.03 seconds to deallocate network for instance. [ 776.188771] env[62619]: INFO nova.compute.manager [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] [instance: 695dfaa6-8e34-4426-b025-6ce7e4e3174c] Took 1.03 seconds to deallocate network for instance. [ 776.326359] env[62619]: DEBUG nova.network.neutron [-] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.364818] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 776.459833] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 776.626949] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.829665] env[62619]: INFO nova.compute.manager [-] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Took 1.04 seconds to deallocate network for instance. [ 776.832511] env[62619]: DEBUG nova.compute.claims [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 776.832694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 776.963049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Releasing lock "refresh_cache-eda91c9e-886e-468e-b9eb-0435c1e94cd3" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 776.963284] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 776.963474] env[62619]: DEBUG nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 776.963642] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 776.994010] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 777.127576] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68f8ffdd-8c50-4072-8837-dfb021b3c730 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.135576] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11ea7a52-b128-48a2-b9c1-4de64f70a865 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.167413] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2238872-88db-45b5-ab6c-45b397cee6b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.175592] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7561ea4-93cd-47bf-9f33-d9c204671f96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 777.190963] env[62619]: DEBUG nova.compute.provider_tree [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 777.219177] env[62619]: INFO nova.scheduler.client.report [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Deleted allocations for instance 695dfaa6-8e34-4426-b025-6ce7e4e3174c [ 777.496426] env[62619]: DEBUG nova.network.neutron [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 777.694282] env[62619]: DEBUG nova.scheduler.client.report [None req-6ad48ae1-53c8-4186-bda1-04674439e644 
tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 777.727541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3aa6ed4e-be16-487b-81a5-8ab380367e80 tempest-ServerPasswordTestJSON-1103504192 tempest-ServerPasswordTestJSON-1103504192-project-member] Lock "695dfaa6-8e34-4426-b025-6ce7e4e3174c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 125.073s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 777.999666] env[62619]: INFO nova.compute.manager [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] [instance: eda91c9e-886e-468e-b9eb-0435c1e94cd3] Took 1.04 seconds to deallocate network for instance. [ 778.093867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "c30e0db3-9b63-44b7-9b7f-810defc530d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.094255] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.201728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.369s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 778.201728] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 778.203384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.840s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 778.205814] env[62619]: INFO nova.compute.claims [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 778.230143] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 778.709990] env[62619]: DEBUG nova.compute.utils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 778.713109] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 778.713291] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 778.752863] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 778.773247] env[62619]: DEBUG nova.policy [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3041343376d4f2fad14577d5c412b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4270942193cd4a9aa397784368b9ae64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 779.038628] env[62619]: INFO nova.scheduler.client.report [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Deleted allocations for instance eda91c9e-886e-468e-b9eb-0435c1e94cd3 [ 779.216341] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 779.264810] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Successfully created port: 996abc5d-953f-4c72-a76c-b2b161d80b85 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 779.550551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-67b78c50-31ec-4718-896d-7b408ca57961 tempest-ServersNegativeTestMultiTenantJSON-130378985 tempest-ServersNegativeTestMultiTenantJSON-130378985-project-member] Lock "eda91c9e-886e-468e-b9eb-0435c1e94cd3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 114.722s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 779.564982] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23e06963-a788-4f7c-8ed9-7d7ed607a90c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.574354] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8246daf9-12bb-4ea9-90c8-94f684bfd857 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.607383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82f10ee8-33d0-4f97-896a-dfd2d76ab7be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.615361] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2a68d61-0910-4eb9-a33c-06617947cba0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 779.630121] env[62619]: DEBUG nova.compute.provider_tree [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 780.054203] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 780.132651] env[62619]: DEBUG nova.scheduler.client.report [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 780.233371] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 780.259779] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 780.260065] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 780.260246] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 780.260430] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 780.260572] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:392}} [ 780.260712] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 780.260911] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 780.261075] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 780.261239] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 780.261407] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 780.261567] env[62619]: DEBUG nova.virt.hardware [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 780.262439] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36e4d9b6-6902-4925-9c88-fbe19c1b92fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.270715] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d17de7a-40b2-407f-84d6-eb1b672c1d8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 780.585484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 780.638039] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.434s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 780.638600] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 780.642044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 780.978063] env[62619]: DEBUG nova.compute.manager [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Received event network-changed-996abc5d-953f-4c72-a76c-b2b161d80b85 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 780.978270] env[62619]: DEBUG nova.compute.manager [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Refreshing instance network info cache due to event network-changed-996abc5d-953f-4c72-a76c-b2b161d80b85. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 780.978478] env[62619]: DEBUG oslo_concurrency.lockutils [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] Acquiring lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 780.978611] env[62619]: DEBUG oslo_concurrency.lockutils [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] Acquired lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 780.978760] env[62619]: DEBUG nova.network.neutron [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Refreshing network info cache for port 996abc5d-953f-4c72-a76c-b2b161d80b85 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 781.152554] env[62619]: DEBUG nova.compute.utils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 781.156969] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 781.156969] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 781.229401] env[62619]: DEBUG nova.policy [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '2d39ad3546f24f9abf24556be82a312e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b8586e2f6e4047b59032679e9c57843a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 781.229401] env[62619]: ERROR nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. [ 781.229401] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 781.229401] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.229401] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 781.229401] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 781.229401] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 781.229401] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 781.229401] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 781.229945] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.229945] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 781.229945] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.229945] env[62619]: ERROR nova.compute.manager raise self.value [ 781.229945] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 781.229945] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 781.229945] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.229945] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 781.229945] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.229945] env[62619]: ERROR 
nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 781.229945] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. [ 781.229945] env[62619]: ERROR nova.compute.manager [ 781.229945] env[62619]: Traceback (most recent call last): [ 781.229945] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 781.229945] env[62619]: listener.cb(fileno) [ 781.229945] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.229945] env[62619]: result = function(*args, **kwargs) [ 781.229945] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 781.229945] env[62619]: return func(*args, **kwargs) [ 781.229945] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.229945] env[62619]: raise e [ 781.229945] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.229945] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 781.229945] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 781.230806] env[62619]: created_port_ids = self._update_ports_for_instance( [ 781.230806] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 781.230806] env[62619]: with excutils.save_and_reraise_exception(): [ 781.230806] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.230806] env[62619]: self.force_reraise() [ 781.230806] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.230806] env[62619]: raise self.value [ 781.230806] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 781.230806] env[62619]: updated_port = self._update_port( [ 781.230806] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.230806] env[62619]: _ensure_no_port_binding_failure(port) [ 781.230806] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.230806] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 781.230806] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. [ 781.230806] env[62619]: Removing descriptor: 16 [ 781.230806] env[62619]: ERROR nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. 
[ 781.230806] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Traceback (most recent call last): [ 781.230806] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 781.230806] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] yield resources [ 781.230806] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self.driver.spawn(context, instance, image_meta, [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] vm_ref = self.build_virtual_machine(instance, [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] vif_infos = vmwarevif.get_vif_info(self._session, [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] for vif in network_info: [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return self._sync_wrapper(fn, *args, **kwargs) [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 781.231624] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self.wait() [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self[:] = self._gt.wait() [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return self._exit_event.wait() [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 781.232393] env[62619]: ERROR 
nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] result = hub.switch() [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return self.greenlet.switch() [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] result = function(*args, **kwargs) [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return func(*args, **kwargs) [ 781.232393] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] raise e [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] nwinfo = self.network_api.allocate_for_instance( [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] created_port_ids = self._update_ports_for_instance( [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] with excutils.save_and_reraise_exception(): [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self.force_reraise() [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] raise self.value [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 781.232872] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] updated_port = self._update_port( [ 781.233361] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 781.233361] 
env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] _ensure_no_port_binding_failure(port) [ 781.233361] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 781.233361] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] raise exception.PortBindingFailed(port_id=port['id']) [ 781.233361] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. [ 781.233361] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] [ 781.233361] env[62619]: INFO nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Terminating instance [ 781.235938] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 781.426446] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b15fac59-e35c-46a3-8eaa-dbc903497bbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.438314] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c767e6f-66d3-4002-9a8f-7a69bacab075 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.474183] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f1f11c1-a003-40f0-9e2c-68593e37307e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.482633] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8c43b7-4490-4b36-bc92-4465b1e80201 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 781.500955] env[62619]: DEBUG nova.compute.provider_tree [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 781.512987] env[62619]: DEBUG nova.network.neutron [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 781.658891] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 781.671135] env[62619]: DEBUG nova.network.neutron [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 781.678405] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Successfully created port: 4ee99cf4-64eb-4671-8b04-cabe02112919 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 782.008237] env[62619]: DEBUG nova.scheduler.client.report [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 782.177770] env[62619]: DEBUG oslo_concurrency.lockutils [req-f27299f9-96fa-420d-96fc-91ec115e8fe4 req-8ca5780e-7288-402f-bc27-7af68dc2aee2 service nova] Releasing lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 782.178236] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.178436] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 782.511276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.869s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
782.511900] env[62619]: ERROR nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Traceback (most recent call last): [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self.driver.spawn(context, instance, image_meta, [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self._vmops.spawn(context, instance, image_meta, injected_files, [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] vm_ref = self.build_virtual_machine(instance, [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] vif_infos = vmwarevif.get_vif_info(self._session, [ 782.511900] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] for vif in network_info: [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] return self._sync_wrapper(fn, *args, **kwargs) [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self.wait() [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self[:] = self._gt.wait() [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] return self._exit_event.wait() [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] current.throw(*self._exc) [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 782.512357] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] result = function(*args, **kwargs) [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] return func(*args, **kwargs) [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] raise e [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] nwinfo = self.network_api.allocate_for_instance( [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] created_port_ids = self._update_ports_for_instance( [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] with excutils.save_and_reraise_exception(): [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] self.force_reraise() [ 782.512735] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] raise self.value [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] updated_port = self._update_port( [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] _ensure_no_port_binding_failure(port) [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: 
eedbd5eb-e431-477e-a817-acb8f54fa511] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] raise exception.PortBindingFailed(port_id=port['id']) [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] nova.exception.PortBindingFailed: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. [ 782.513130] env[62619]: ERROR nova.compute.manager [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] [ 782.513130] env[62619]: DEBUG nova.compute.utils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 782.514142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.296s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 782.515737] env[62619]: INFO nova.compute.claims [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 782.518849] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Build of instance eedbd5eb-e431-477e-a817-acb8f54fa511 was re-scheduled: Binding failed for port c0e0a74b-ce7a-4e90-8465-6999f72205cd, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 782.521340] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 782.522038] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquiring lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.522262] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Acquired lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.522491] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 782.670925] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 782.710533] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 782.715113] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 782.715352] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 782.715506] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 782.715684] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 782.715823] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 782.715965] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 782.716868] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 782.716868] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 
tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 782.716868] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 782.716868] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 782.716868] env[62619]: DEBUG nova.virt.hardware [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 782.718017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b16ced6-d4f0-4c01-993a-a3cfe9cda044 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.728457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4e6b319-eb80-4f04-9c60-1cc4ec4d9da3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 782.751529] env[62619]: DEBUG nova.compute.manager [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Received event network-changed-4ee99cf4-64eb-4671-8b04-cabe02112919 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 782.751718] env[62619]: DEBUG nova.compute.manager [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Refreshing instance network info cache due to event network-changed-4ee99cf4-64eb-4671-8b04-cabe02112919. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 782.751919] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] Acquiring lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 782.752061] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] Acquired lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 782.752211] env[62619]: DEBUG nova.network.neutron [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Refreshing network info cache for port 4ee99cf4-64eb-4671-8b04-cabe02112919 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 782.822017] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.005823] env[62619]: DEBUG nova.compute.manager [req-ad8c8742-11f7-4018-aa57-ce6c4db9fc2f req-fbbc6555-bf71-4447-83cf-145e4dd0c53a service nova] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Received event network-vif-deleted-996abc5d-953f-4c72-a76c-b2b161d80b85 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 783.044421] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.115923] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.198082] env[62619]: ERROR nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. 
[ 783.198082] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 783.198082] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 783.198082] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 783.198082] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 783.198082] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 783.198082] env[62619]: ERROR nova.compute.manager raise self.value [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 783.198082] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 783.198082] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 783.198082] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 783.199296] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 783.199296] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 783.199296] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. 
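Each of the PortBindingFailed tracebacks in this run bottoms out in the same frame, /opt/stack/nova/nova/network/neutron.py line 294, where _ensure_no_port_binding_failure raises exception.PortBindingFailed(port_id=port['id']). Below is a minimal, self-contained sketch of that kind of check; it assumes (this is not shown in the log itself) that Neutron flags a failed binding through the port's binding:vif_type attribute, and the exception class is a stand-in, not Nova's own.

```python
# Sketch of the port-binding check implied by the traceback above.
# Assumption: Neutron marks a failed binding with binding:vif_type == 'binding_failed'.
# PortBindingFailed here is a stand-in for nova.exception.PortBindingFailed.

class PortBindingFailed(Exception):
    def __init__(self, port_id):
        super().__init__(
            f"Binding failed for port {port_id}, "
            "please check neutron logs for more information.")


def ensure_no_port_binding_failure(port: dict) -> None:
    """Raise if Neutron reports the port's binding as failed."""
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])


# Example with the port from the traceback above.
port = {'id': '4ee99cf4-64eb-4671-8b04-cabe02112919',
        'binding:vif_type': 'binding_failed'}
try:
    ensure_no_port_binding_failure(port)
except PortBindingFailed as exc:
    print(exc)
```

On the Neutron side the same condition is usually visible as binding_vif_type = binding_failed in the output of `openstack port show 4ee99cf4-64eb-4671-8b04-cabe02112919`, which is where the "please check neutron logs" hint points.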
[ 783.199296] env[62619]: ERROR nova.compute.manager [ 783.199296] env[62619]: Traceback (most recent call last): [ 783.199296] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 783.199296] env[62619]: listener.cb(fileno) [ 783.199296] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 783.199296] env[62619]: result = function(*args, **kwargs) [ 783.199296] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 783.199296] env[62619]: return func(*args, **kwargs) [ 783.199296] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 783.199296] env[62619]: raise e [ 783.199296] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 783.199296] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 783.199296] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 783.199296] env[62619]: created_port_ids = self._update_ports_for_instance( [ 783.199296] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 783.199296] env[62619]: with excutils.save_and_reraise_exception(): [ 783.199296] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 783.199296] env[62619]: self.force_reraise() [ 783.199296] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 783.199296] env[62619]: raise self.value [ 783.199296] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 783.199296] env[62619]: updated_port = self._update_port( [ 783.199296] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 783.199296] env[62619]: _ensure_no_port_binding_failure(port) [ 783.199296] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 783.199296] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 783.200424] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. [ 783.200424] env[62619]: Removing descriptor: 18 [ 783.200424] env[62619]: ERROR nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. 
[ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Traceback (most recent call last): [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] yield resources [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self.driver.spawn(context, instance, image_meta, [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 783.200424] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] vm_ref = self.build_virtual_machine(instance, [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] vif_infos = vmwarevif.get_vif_info(self._session, [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] for vif in network_info: [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return self._sync_wrapper(fn, *args, **kwargs) [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self.wait() [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self[:] = self._gt.wait() [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return self._exit_event.wait() [ 783.200959] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 783.201442] env[62619]: ERROR 
nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] result = hub.switch() [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return self.greenlet.switch() [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] result = function(*args, **kwargs) [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return func(*args, **kwargs) [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] raise e [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] nwinfo = self.network_api.allocate_for_instance( [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 783.201442] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] created_port_ids = self._update_ports_for_instance( [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] with excutils.save_and_reraise_exception(): [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self.force_reraise() [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] raise self.value [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] updated_port = self._update_port( [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 783.202134] 
env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] _ensure_no_port_binding_failure(port) [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 783.202134] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] raise exception.PortBindingFailed(port_id=port['id']) [ 783.202560] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. [ 783.202560] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] [ 783.202560] env[62619]: INFO nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Terminating instance [ 783.202560] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquiring lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 783.273021] env[62619]: DEBUG nova.network.neutron [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.324611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.325110] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 783.325314] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 783.325630] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-afec0dea-bdef-48ec-84a9-9495148de7fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.337359] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51b0ff9b-0697-455e-959a-c6ea96914feb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.368264] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 28a37e28-6bca-4647-9cba-345da2f973a2 could not be found. [ 783.368578] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 783.368841] env[62619]: INFO nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Took 0.04 seconds to destroy the instance on the hypervisor. [ 783.369208] env[62619]: DEBUG oslo.service.loopingcall [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 783.370764] env[62619]: DEBUG nova.compute.manager [-] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.370764] env[62619]: DEBUG nova.network.neutron [-] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.388534] env[62619]: DEBUG nova.network.neutron [-] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.421077] env[62619]: DEBUG nova.network.neutron [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.619565] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Releasing lock "refresh_cache-eedbd5eb-e431-477e-a817-acb8f54fa511" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.619765] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 783.619954] env[62619]: DEBUG nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 783.620149] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 783.640066] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 783.802534] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c1842f5-7234-4d61-8fad-cd79d0fd29e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.811125] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3580e986-0d67-4086-8cc3-c9339596e5a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.847726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188ac485-86db-4549-b446-4ca816b1c90b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.855978] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1d980bc-eb8d-47f8-9d58-e5053f5698e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 783.870413] env[62619]: DEBUG nova.compute.provider_tree [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 783.891484] env[62619]: DEBUG nova.network.neutron [-] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 783.923592] env[62619]: DEBUG oslo_concurrency.lockutils [req-dc69fd43-795c-4bef-b373-1b04f2b0484f req-56fb0aa1-36a9-4bb0-852a-7dd2621006c2 service nova] Releasing lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 783.923950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquired lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 783.924143] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 784.142985] env[62619]: DEBUG nova.network.neutron [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.374195] env[62619]: DEBUG nova.scheduler.client.report [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 
tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 784.395121] env[62619]: INFO nova.compute.manager [-] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Took 1.03 seconds to deallocate network for instance. [ 784.397755] env[62619]: DEBUG nova.compute.claims [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 784.398181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 784.443158] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 784.533920] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 784.645614] env[62619]: INFO nova.compute.manager [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] [instance: eedbd5eb-e431-477e-a817-acb8f54fa511] Took 1.03 seconds to deallocate network for instance. 
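The inventory reported above for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 is what bounds claims like the "Claim successful" entry at 782.515737: placement treats usable capacity as roughly (total − reserved) × allocation_ratio per resource class, with max_unit capping what a single allocation may take. A small sketch of that arithmetic using the logged values:

```python
# Rough capacity math for the inventory logged above.
# Assumption: usable capacity ~= (total - reserved) * allocation_ratio,
# while max_unit limits how much one instance can consume at once.

inventory = {
    'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
    'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 149},
}

for rc, inv in inventory.items():
    capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
    print(f"{rc}: capacity={capacity:.0f}, per-instance cap={inv['max_unit']}")
# VCPU: capacity=192, per-instance cap=16
# MEMORY_MB: capacity=196078, per-instance cap=65530
# DISK_GB: capacity=400, per-instance cap=149
```

If the osc-placement plugin is installed, the same figures can be read back with `openstack resource provider inventory list c1b543f3-8b72-4e01-a5a8-30dc9ed76c83`.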
[ 784.777088] env[62619]: DEBUG nova.compute.manager [req-8a15f95a-4f21-4455-93d4-9b5157361f44 req-86814889-8f13-4524-aa30-711e75031c45 service nova] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Received event network-vif-deleted-4ee99cf4-64eb-4671-8b04-cabe02112919 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 784.881279] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.365s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 784.881279] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 784.883162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 16.184s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 785.035316] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Releasing lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 785.035801] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 785.036017] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 785.036712] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-dfdd4b0b-1581-4ab3-b240-b02dda16531e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.046997] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-735a0b55-a458-4236-98db-9820f5c6dd90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.069938] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5d7a9c55-b148-4115-8390-66b2501f859a could not be found. [ 785.070224] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 785.070388] env[62619]: INFO nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Took 0.03 seconds to destroy the instance on the hypervisor. [ 785.070630] env[62619]: DEBUG oslo.service.loopingcall [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 785.070835] env[62619]: DEBUG nova.compute.manager [-] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 785.070936] env[62619]: DEBUG nova.network.neutron [-] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 785.086636] env[62619]: DEBUG nova.network.neutron [-] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 785.388165] env[62619]: DEBUG nova.compute.utils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 785.394018] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 785.394018] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 785.444037] env[62619]: DEBUG nova.policy [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '359ce2c1621c42229089e6e48d0e645f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'debbfd22f0504759b386c0d56a9320da', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 785.592726] env[62619]: DEBUG nova.network.neutron [-] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 785.614567] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5c4917-d860-4eee-a97f-ae4de4c9299e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.622955] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f455c532-edb9-4f6e-9ab5-8a48eafbc32b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.653167] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-255cb255-8ed6-4247-86be-610ac3d05820 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.664095] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec23b166-d244-4132-b24f-659cb6a2c7d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 785.678651] env[62619]: DEBUG nova.compute.provider_tree [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 
{{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 785.680057] env[62619]: INFO nova.scheduler.client.report [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Deleted allocations for instance eedbd5eb-e431-477e-a817-acb8f54fa511 [ 785.740991] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Successfully created port: 11ea45c7-21d2-4b92-b4bb-d36ca07e614b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 785.897022] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 786.095161] env[62619]: INFO nova.compute.manager [-] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Took 1.02 seconds to deallocate network for instance. [ 786.097672] env[62619]: DEBUG nova.compute.claims [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 786.097844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 786.187184] env[62619]: DEBUG nova.scheduler.client.report [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 786.190777] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a4b20828-a034-45d8-aab0-a3a1fbf10a97 tempest-ServerAddressesTestJSON-2081107117 tempest-ServerAddressesTestJSON-2081107117-project-member] Lock "eedbd5eb-e431-477e-a817-acb8f54fa511" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.584s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.691936] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 
tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.809s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 786.692604] env[62619]: ERROR nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Traceback (most recent call last): [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self.driver.spawn(context, instance, image_meta, [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self._vmops.spawn(context, instance, image_meta, injected_files, [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] vm_ref = self.build_virtual_machine(instance, [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] vif_infos = vmwarevif.get_vif_info(self._session, [ 786.692604] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] for vif in network_info: [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] return self._sync_wrapper(fn, *args, **kwargs) [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self.wait() [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self[:] = self._gt.wait() [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] return self._exit_event.wait() [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] current.throw(*self._exc) [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 786.693015] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] result = function(*args, **kwargs) [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] return func(*args, **kwargs) [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] raise e [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] nwinfo = self.network_api.allocate_for_instance( [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] created_port_ids = self._update_ports_for_instance( [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] with excutils.save_and_reraise_exception(): [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] self.force_reraise() [ 786.693464] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] raise self.value [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] updated_port = self._update_port( [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 
5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] _ensure_no_port_binding_failure(port) [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] raise exception.PortBindingFailed(port_id=port['id']) [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] nova.exception.PortBindingFailed: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. [ 786.693909] env[62619]: ERROR nova.compute.manager [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] [ 786.693909] env[62619]: DEBUG nova.compute.utils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 786.694539] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.153s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 786.696378] env[62619]: INFO nova.compute.claims [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 786.704290] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 786.705596] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Build of instance 5ffd77a0-df9a-461c-837e-05b4ff66ea52 was re-scheduled: Binding failed for port f20a72bb-fd85-4120-b420-d0c516473eed, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 786.705812] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 786.706045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquiring lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.706213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Acquired lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.706376] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 786.810269] env[62619]: DEBUG nova.compute.manager [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Received event network-changed-11ea45c7-21d2-4b92-b4bb-d36ca07e614b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 786.810465] env[62619]: DEBUG nova.compute.manager [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Refreshing instance network info cache due to event network-changed-11ea45c7-21d2-4b92-b4bb-d36ca07e614b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 786.810674] env[62619]: DEBUG oslo_concurrency.lockutils [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] Acquiring lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 786.810820] env[62619]: DEBUG oslo_concurrency.lockutils [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] Acquired lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 786.810959] env[62619]: DEBUG nova.network.neutron [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Refreshing network info cache for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 786.906007] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 786.930685] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 786.930934] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 786.931107] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 786.931291] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 786.931434] env[62619]: DEBUG 
nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 786.931575] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 786.931774] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 786.931925] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 786.932099] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 786.932257] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 786.932424] env[62619]: DEBUG nova.virt.hardware [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 786.933299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bd37f43-d069-4f83-8b68-3df42b0704da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 786.942237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c35f2f45-3653-4e87-8011-0f0fe0f223f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.009171] env[62619]: ERROR nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. 
[ 787.009171] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.009171] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 787.009171] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 787.009171] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.009171] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.009171] env[62619]: ERROR nova.compute.manager raise self.value [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 787.009171] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 787.009171] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.009171] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 787.010018] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.010018] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 787.010018] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. 
[ 787.010018] env[62619]: ERROR nova.compute.manager [ 787.010018] env[62619]: Traceback (most recent call last): [ 787.010018] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 787.010018] env[62619]: listener.cb(fileno) [ 787.010018] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.010018] env[62619]: result = function(*args, **kwargs) [ 787.010018] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 787.010018] env[62619]: return func(*args, **kwargs) [ 787.010018] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.010018] env[62619]: raise e [ 787.010018] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.010018] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 787.010018] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 787.010018] env[62619]: created_port_ids = self._update_ports_for_instance( [ 787.010018] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 787.010018] env[62619]: with excutils.save_and_reraise_exception(): [ 787.010018] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.010018] env[62619]: self.force_reraise() [ 787.010018] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.010018] env[62619]: raise self.value [ 787.010018] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 787.010018] env[62619]: updated_port = self._update_port( [ 787.010018] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.010018] env[62619]: _ensure_no_port_binding_failure(port) [ 787.010018] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.010018] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 787.011613] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. [ 787.011613] env[62619]: Removing descriptor: 18 [ 787.011613] env[62619]: ERROR nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. 
[ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Traceback (most recent call last): [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] yield resources [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self.driver.spawn(context, instance, image_meta, [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 787.011613] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] vm_ref = self.build_virtual_machine(instance, [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] for vif in network_info: [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return self._sync_wrapper(fn, *args, **kwargs) [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self.wait() [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self[:] = self._gt.wait() [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return self._exit_event.wait() [ 787.012287] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 787.012995] env[62619]: ERROR 
nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] result = hub.switch() [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return self.greenlet.switch() [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] result = function(*args, **kwargs) [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return func(*args, **kwargs) [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] raise e [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] nwinfo = self.network_api.allocate_for_instance( [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 787.012995] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] created_port_ids = self._update_ports_for_instance( [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] with excutils.save_and_reraise_exception(): [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self.force_reraise() [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] raise self.value [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] updated_port = self._update_port( [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 787.013763] 
env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] _ensure_no_port_binding_failure(port) [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 787.013763] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] raise exception.PortBindingFailed(port_id=port['id']) [ 787.014505] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. [ 787.014505] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] [ 787.014505] env[62619]: INFO nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Terminating instance [ 787.014505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 787.227969] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 787.237840] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.330036] env[62619]: DEBUG nova.network.neutron [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.366246] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.431756] env[62619]: DEBUG nova.network.neutron [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 787.869602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Releasing lock "refresh_cache-5ffd77a0-df9a-461c-837e-05b4ff66ea52" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.869848] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 787.870016] env[62619]: DEBUG nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 787.870188] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 787.885615] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 787.934841] env[62619]: DEBUG oslo_concurrency.lockutils [req-c47a4224-3d49-4905-9ce6-dbc6ccfc857a req-3ccee2b3-8094-48dc-a5e7-1ab581470d7e service nova] Releasing lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 787.935401] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquired lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 787.935659] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 787.978083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc067e30-2888-433d-8b2c-e97329cb9e99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 787.986576] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8c3e5f9-6a1d-4226-a5f9-aa6ec4cb7aa9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.017050] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3d0b2f5-3c81-4b40-bc17-74408dfad102 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.024842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96742a9c-295a-4cfb-a9e4-2bdb22882b14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 788.037943] env[62619]: DEBUG nova.compute.provider_tree [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 788.391663] env[62619]: DEBUG nova.network.neutron [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.451844] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 788.530450] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 788.540946] env[62619]: DEBUG nova.scheduler.client.report [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 788.836022] env[62619]: DEBUG nova.compute.manager [req-cf993e19-7dda-4460-9c81-fba1789f0493 req-a5b3e8c0-de11-4599-b5ee-3412e0edf7df service nova] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Received event network-vif-deleted-11ea45c7-21d2-4b92-b4bb-d36ca07e614b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 788.895033] env[62619]: INFO nova.compute.manager [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] [instance: 5ffd77a0-df9a-461c-837e-05b4ff66ea52] Took 1.02 seconds to deallocate network for instance. [ 789.032584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Releasing lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 789.033041] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 789.033247] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 789.033540] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e811565f-fa3a-4fd9-ab73-0f1782fdba2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.043401] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff056307-505f-4c7b-bb5f-8a2ba52d44e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 789.054400] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.360s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 789.054842] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 789.057900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 17.300s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 789.058201] env[62619]: DEBUG nova.objects.instance [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 789.076029] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 0ccdb6cd-d895-42f9-83fa-007c69ce77f9 could not be found. 
[ 789.076029] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 789.076029] env[62619]: INFO nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Took 0.04 seconds to destroy the instance on the hypervisor. [ 789.076029] env[62619]: DEBUG oslo.service.loopingcall [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 789.076029] env[62619]: DEBUG nova.compute.manager [-] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 789.076029] env[62619]: DEBUG nova.network.neutron [-] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 789.089691] env[62619]: DEBUG nova.network.neutron [-] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 789.559378] env[62619]: DEBUG nova.compute.utils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 789.561106] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 789.561274] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 789.591462] env[62619]: DEBUG nova.network.neutron [-] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 789.766493] env[62619]: DEBUG nova.policy [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1e90a23c6444273bc10051f3227804c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '998daea123aa48b2816d1cbe9e662950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 789.930525] env[62619]: INFO nova.scheduler.client.report [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Deleted allocations for instance 5ffd77a0-df9a-461c-837e-05b4ff66ea52 [ 790.069166] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 790.075530] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1be2f8e9-bf99-4155-b02c-1154f0abaa1a tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 1.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.075530] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 16.092s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 790.093977] env[62619]: INFO nova.compute.manager [-] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Took 1.02 seconds to deallocate network for instance. 
[ 790.098099] env[62619]: DEBUG nova.compute.claims [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 790.098295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 790.110146] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Successfully created port: e2771821-b75a-44ca-84c8-ad9751ad108f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 790.440090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8cb3cd60-664d-428c-966f-39abb41ed7aa tempest-VolumesAdminNegativeTest-1916042834 tempest-VolumesAdminNegativeTest-1916042834-project-member] Lock "5ffd77a0-df9a-461c-837e-05b4ff66ea52" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 119.060s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 790.914178] env[62619]: DEBUG nova.compute.manager [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Received event network-changed-e2771821-b75a-44ca-84c8-ad9751ad108f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 790.914178] env[62619]: DEBUG nova.compute.manager [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Refreshing instance network info cache due to event network-changed-e2771821-b75a-44ca-84c8-ad9751ad108f. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 790.914178] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] Acquiring lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 790.914178] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] Acquired lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 790.914178] env[62619]: DEBUG nova.network.neutron [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Refreshing network info cache for port e2771821-b75a-44ca-84c8-ad9751ad108f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 790.945607] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 791.802111] env[62619]: ERROR nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. 
[ 791.802111] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.802111] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.802111] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.802111] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.802111] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.802111] env[62619]: ERROR nova.compute.manager raise self.value [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.802111] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 791.802111] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.802111] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 791.802828] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.802828] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 791.802828] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. 
[ 791.802828] env[62619]: ERROR nova.compute.manager [ 791.802828] env[62619]: Traceback (most recent call last): [ 791.802828] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 791.802828] env[62619]: listener.cb(fileno) [ 791.802828] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.802828] env[62619]: result = function(*args, **kwargs) [ 791.802828] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 791.802828] env[62619]: return func(*args, **kwargs) [ 791.802828] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.802828] env[62619]: raise e [ 791.802828] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.802828] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 791.802828] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.802828] env[62619]: created_port_ids = self._update_ports_for_instance( [ 791.802828] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.802828] env[62619]: with excutils.save_and_reraise_exception(): [ 791.802828] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.802828] env[62619]: self.force_reraise() [ 791.802828] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.802828] env[62619]: raise self.value [ 791.802828] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.802828] env[62619]: updated_port = self._update_port( [ 791.802828] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.802828] env[62619]: _ensure_no_port_binding_failure(port) [ 791.802828] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 791.802828] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 791.803817] env[62619]: nova.exception.PortBindingFailed: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. [ 791.803817] env[62619]: Removing descriptor: 16 [ 791.803817] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 791.829183] env[62619]: WARNING nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 791.829364] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance a2ed3e4d-40c2-46b0-9892-0e9cce7b330b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.829490] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 28a37e28-6bca-4647-9cba-345da2f973a2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.829603] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 5d7a9c55-b148-4115-8390-66b2501f859a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.829713] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 0ccdb6cd-d895-42f9-83fa-007c69ce77f9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.829821] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 05a77fe4-172d-4e25-9652-f9dc7cc365ba actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 791.834397] env[62619]: DEBUG nova.network.neutron [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 791.837384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 791.844435] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 791.844629] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 791.844782] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 791.844957] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 791.845111] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 791.845251] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 791.845446] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 791.845595] env[62619]: DEBUG nova.virt.hardware [None 
req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 791.845750] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 791.845899] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 791.846082] env[62619]: DEBUG nova.virt.hardware [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 791.847192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6b4600-ed89-4d93-a713-07f70f6c5822 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.856434] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77760163-8165-4589-bab6-8f375fbc7a81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 791.870710] env[62619]: ERROR nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. 
[ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Traceback (most recent call last): [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] yield resources [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self.driver.spawn(context, instance, image_meta, [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] vm_ref = self.build_virtual_machine(instance, [ 791.870710] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] for vif in network_info: [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] return self._sync_wrapper(fn, *args, **kwargs) [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self.wait() [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self[:] = self._gt.wait() [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] return self._exit_event.wait() [ 791.871301] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 791.871301] env[62619]: ERROR 
nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] current.throw(*self._exc) [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] result = function(*args, **kwargs) [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] return func(*args, **kwargs) [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] raise e [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] nwinfo = self.network_api.allocate_for_instance( [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] created_port_ids = self._update_ports_for_instance( [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] with excutils.save_and_reraise_exception(): [ 791.871750] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self.force_reraise() [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] raise self.value [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] updated_port = self._update_port( [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] _ensure_no_port_binding_failure(port) [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] raise exception.PortBindingFailed(port_id=port['id']) [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] nova.exception.PortBindingFailed: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. [ 791.872212] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] [ 791.872212] env[62619]: INFO nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Terminating instance [ 791.872832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 792.011212] env[62619]: DEBUG nova.network.neutron [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 792.332642] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance cc6bdf77-2540-47e3-aed2-cb0c73b329cc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 792.515927] env[62619]: DEBUG oslo_concurrency.lockutils [req-ed13f50c-bf8b-42cc-87c9-658dd767cde7 req-a3055c9a-dd08-4fb7-971e-3cf03413b9c4 service nova] Releasing lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 792.516383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 792.516562] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 792.835871] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 1def15dd-e2ad-470e-bff8-9121df881d46 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.034270] env[62619]: DEBUG nova.compute.manager [req-35932ca7-80e1-4fe3-9513-a0c8a864d1d6 req-33309149-1ca4-47a8-8f76-f64ad3edd0ac service nova] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Received event network-vif-deleted-e2771821-b75a-44ca-84c8-ad9751ad108f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 793.041646] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.185708] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 793.339090] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 793.690706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 793.691134] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 793.691337] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 793.691653] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ae7cbeb4-b6e8-4b39-b683-7c39b52b5660 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.701565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5defb3ab-61d7-4bbb-9715-21902d2986f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 793.724973] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 05a77fe4-172d-4e25-9652-f9dc7cc365ba could not be found. [ 793.725208] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 793.725411] env[62619]: INFO nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Took 0.03 seconds to destroy the instance on the hypervisor. [ 793.725665] env[62619]: DEBUG oslo.service.loopingcall [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 793.725876] env[62619]: DEBUG nova.compute.manager [-] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 793.725965] env[62619]: DEBUG nova.network.neutron [-] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 793.749634] env[62619]: DEBUG nova.network.neutron [-] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 793.845098] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 55316613-3507-4386-a7f9-dbcc52f26327 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.252747] env[62619]: DEBUG nova.network.neutron [-] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 794.352581] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 49fd766a-d798-415e-b5eb-4ad4fe7934c0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 794.755094] env[62619]: INFO nova.compute.manager [-] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Took 1.03 seconds to deallocate network for instance. [ 794.757688] env[62619]: DEBUG nova.compute.claims [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 794.757866] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 794.856615] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance d7ddbbe2-2bea-4fa1-938c-a344f49f0178 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.360300] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 42e05759-742f-4732-97cb-cab2cfb06996 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 795.863909] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 30045f41-3396-47cb-833d-b5b434c3671b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.368163] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 9fd66533-39ff-401d-81ef-f37eaceb3103 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 796.874051] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 61d68c36-5251-4fad-9d3b-125296ae0861 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.379921] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.883965] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 797.883965] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 797.884130] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 798.195501] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a235357-9f8c-4cef-ae87-b6e5536604a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.203242] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea24dfd7-a496-450f-8ae3-8beb7190b130 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.232732] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0824bc2c-0598-45ef-82a9-e8a17d9dfd5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.240586] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19093bc5-6ae8-467e-a1de-6f5ae69a2ea6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 798.253650] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None 
None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 798.758499] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 798.941951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 798.942207] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.263903] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 799.264199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 9.190s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 799.264490] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.985s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 799.266572] env[62619]: INFO nova.compute.claims [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 799.269899] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 799.269899] env[62619]: DEBUG 
nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 799.775600] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] There are 4 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 799.775992] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9735d6d1-eb10-46b4-a273-10b1351033f0] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 800.282871] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: d7404720-7cf0-41bf-a882-2cb6db2253bc] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 800.497425] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f8167e0-558c-4178-babf-b01c2673c5c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.505869] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6561070c-7fb9-41f2-b870-a1c2eca88612 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.534342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8570c4e9-dda6-4629-9af7-26d2eb85def5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.541380] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-432739cb-6f9a-4de9-8d75-b51965ae1823 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 800.554139] env[62619]: DEBUG nova.compute.provider_tree [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 800.785802] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: b7c425a1-a80d-4a62-a71f-d14fdf638cf7] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 801.057802] env[62619]: DEBUG nova.scheduler.client.report [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 801.288878] env[62619]: DEBUG nova.compute.manager 
[None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 79a94ed1-1139-4194-8091-00b7b1562330] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 801.562737] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.297s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.562737] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 801.565499] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 24.939s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.565832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 801.567660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 24.735s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 801.589515] env[62619]: INFO nova.scheduler.client.report [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Deleted allocations for instance dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe [ 801.792163] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 801.792391] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 802.072050] env[62619]: DEBUG nova.compute.utils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 802.076833] env[62619]: DEBUG nova.compute.manager [None 
req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 802.076833] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 802.098284] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dd3bdff9-ab79-47a4-834c-22e90a90d606 tempest-ServerShowV257Test-1984098576 tempest-ServerShowV257Test-1984098576-project-member] Lock "dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 29.304s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 802.126246] env[62619]: DEBUG nova.policy [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afc726cf99504d508762f07d70be1b14', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '71e96b29cd0348778e223b6447e131ba', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 802.275923] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9002f8d3-7a53-457c-a10a-5555cca15014 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.283559] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0e891e-e36c-44f5-b4bd-d027e34dafd0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.311286] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 802.312634] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45317772-b055-421d-99dd-4bb75724e4bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.319428] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9efc8ee6-b262-4974-8c98-aecd8d41978f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 802.332388] env[62619]: DEBUG nova.compute.provider_tree [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed in ProviderTree for provider: 
c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 802.409946] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Successfully created port: 7eb95b58-08ae-4ef3-a5eb-4733292345ba {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 802.577562] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 802.835705] env[62619]: DEBUG nova.scheduler.client.report [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 803.223061] env[62619]: DEBUG nova.compute.manager [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Received event network-changed-7eb95b58-08ae-4ef3-a5eb-4733292345ba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 803.223270] env[62619]: DEBUG nova.compute.manager [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Refreshing instance network info cache due to event network-changed-7eb95b58-08ae-4ef3-a5eb-4733292345ba. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 803.223497] env[62619]: DEBUG oslo_concurrency.lockutils [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] Acquiring lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.223650] env[62619]: DEBUG oslo_concurrency.lockutils [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] Acquired lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.223780] env[62619]: DEBUG nova.network.neutron [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Refreshing network info cache for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 803.340888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.773s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 803.341572] env[62619]: ERROR nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. 
[ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Traceback (most recent call last): [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self.driver.spawn(context, instance, image_meta, [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] vm_ref = self.build_virtual_machine(instance, [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.341572] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] for vif in network_info: [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] return self._sync_wrapper(fn, *args, **kwargs) [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self.wait() [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self[:] = self._gt.wait() [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] return self._exit_event.wait() [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] current.throw(*self._exc) [ 803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
803.341931] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] result = function(*args, **kwargs) [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] return func(*args, **kwargs) [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] raise e [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] nwinfo = self.network_api.allocate_for_instance( [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] created_port_ids = self._update_ports_for_instance( [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] with excutils.save_and_reraise_exception(): [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] self.force_reraise() [ 803.342307] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] raise self.value [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] updated_port = self._update_port( [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] _ensure_no_port_binding_failure(port) [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] raise exception.PortBindingFailed(port_id=port['id']) [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] nova.exception.PortBindingFailed: Binding failed for 
port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. [ 803.342596] env[62619]: ERROR nova.compute.manager [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] [ 803.342596] env[62619]: DEBUG nova.compute.utils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 803.343787] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.592s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 803.345756] env[62619]: INFO nova.compute.claims [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 803.348933] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Build of instance a2ed3e4d-40c2-46b0-9892-0e9cce7b330b was re-scheduled: Binding failed for port 6d0bb7c9-6909-4052-a644-3a785374f724, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 803.350532] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 803.350532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquiring lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.350532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Acquired lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 803.350532] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.409107] env[62619]: ERROR nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. 
[ 803.409107] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.409107] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.409107] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.409107] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.409107] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.409107] env[62619]: ERROR nova.compute.manager raise self.value [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.409107] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 803.409107] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.409107] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 803.409548] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.409548] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 803.409548] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. 
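Editor's note: the traceback above bottoms out in nova/network/neutron.py:_ensure_no_port_binding_failure(), which turns a failed Neutron binding into nova.exception.PortBindingFailed. Below is a minimal sketch of that check, assuming (as the message wording suggests) that Neutron marks a failed binding via the port's 'binding:vif_type' attribute with the value 'binding_failed'; the exception class here is a stand-in, not the real nova.exception implementation.

class PortBindingFailed(Exception):
    """Stand-in for nova.exception.PortBindingFailed (illustrative only)."""
    def __init__(self, port_id):
        super().__init__(
            "Binding failed for port %s, please check neutron logs "
            "for more information." % port_id)

def _ensure_no_port_binding_failure(port):
    # Assumption: a failed binding is indicated by the port's
    # 'binding:vif_type' being set to 'binding_failed'.
    if port.get('binding:vif_type') == 'binding_failed':
        raise PortBindingFailed(port_id=port['id'])

# Example: a port shaped like the ones named in this log raises here.
try:
    _ensure_no_port_binding_failure(
        {'id': '7eb95b58-08ae-4ef3-a5eb-4733292345ba',
         'binding:vif_type': 'binding_failed'})
except PortBindingFailed as exc:
    print(exc)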
[ 803.409548] env[62619]: ERROR nova.compute.manager [ 803.409548] env[62619]: Traceback (most recent call last): [ 803.409548] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 803.409548] env[62619]: listener.cb(fileno) [ 803.409548] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.409548] env[62619]: result = function(*args, **kwargs) [ 803.409548] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.409548] env[62619]: return func(*args, **kwargs) [ 803.409548] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.409548] env[62619]: raise e [ 803.409548] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.409548] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 803.409548] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.409548] env[62619]: created_port_ids = self._update_ports_for_instance( [ 803.409548] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.409548] env[62619]: with excutils.save_and_reraise_exception(): [ 803.409548] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.409548] env[62619]: self.force_reraise() [ 803.409548] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.409548] env[62619]: raise self.value [ 803.409548] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.409548] env[62619]: updated_port = self._update_port( [ 803.409548] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.409548] env[62619]: _ensure_no_port_binding_failure(port) [ 803.409548] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 803.409548] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 803.410145] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. [ 803.410145] env[62619]: Removing descriptor: 16 [ 803.589746] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 803.615460] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 803.615695] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 803.615846] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 803.616033] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 803.616177] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 803.616312] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 803.616507] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 803.616664] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 803.616822] 
env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 803.616977] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 803.617242] env[62619]: DEBUG nova.virt.hardware [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 803.618509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-331f4cc1-b3cb-4b69-a472-9708cc99d884 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.627057] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6cd6967-1c23-4100-a2c8-0676d933db1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.640651] env[62619]: ERROR nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. 
[ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Traceback (most recent call last): [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] yield resources [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self.driver.spawn(context, instance, image_meta, [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] vm_ref = self.build_virtual_machine(instance, [ 803.640651] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] vif_infos = vmwarevif.get_vif_info(self._session, [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] for vif in network_info: [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] return self._sync_wrapper(fn, *args, **kwargs) [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self.wait() [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self[:] = self._gt.wait() [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] return self._exit_event.wait() [ 803.640940] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 803.640940] env[62619]: ERROR 
nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] current.throw(*self._exc) [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] result = function(*args, **kwargs) [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] return func(*args, **kwargs) [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] raise e [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] nwinfo = self.network_api.allocate_for_instance( [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] created_port_ids = self._update_ports_for_instance( [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] with excutils.save_and_reraise_exception(): [ 803.641216] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self.force_reraise() [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] raise self.value [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] updated_port = self._update_port( [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] _ensure_no_port_binding_failure(port) [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] raise exception.PortBindingFailed(port_id=port['id']) [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] nova.exception.PortBindingFailed: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. [ 803.641478] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] [ 803.641478] env[62619]: INFO nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Terminating instance [ 803.643041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquiring lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 803.742520] env[62619]: DEBUG nova.network.neutron [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.847499] env[62619]: DEBUG nova.network.neutron [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.867968] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.909828] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.351301] env[62619]: DEBUG oslo_concurrency.lockutils [req-a33521b0-0980-4d9b-8fd3-129a22c86b47 req-018fed96-5152-42f3-b793-a7eb8674f7ee service nova] Releasing lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.351798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquired lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 804.352036] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 804.412434] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Releasing lock "refresh_cache-a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 804.412809] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 804.412999] env[62619]: DEBUG nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 804.413185] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 804.430844] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.556401] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd76af5e-9eff-4ba8-bcf1-c401a3a6b3d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.564138] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59bd9cc0-5836-46b6-a662-8612f39d5adc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.594024] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2db55c5-c456-4e61-996e-50e6f94edbf0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.601127] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37bddd20-3459-4276-a546-418c26ec5375 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.613512] env[62619]: DEBUG nova.compute.provider_tree [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.868474] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 804.933602] env[62619]: DEBUG nova.network.neutron [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 804.935352] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.116318] env[62619]: DEBUG nova.scheduler.client.report [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 805.247283] env[62619]: DEBUG nova.compute.manager [req-4f6b4dd9-6626-4d54-ac4a-4019ed4425e3 req-1822968b-e29c-47ad-b6c4-900c63550a39 service nova] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Received event network-vif-deleted-7eb95b58-08ae-4ef3-a5eb-4733292345ba {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 805.437225] env[62619]: INFO nova.compute.manager [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] [instance: a2ed3e4d-40c2-46b0-9892-0e9cce7b330b] Took 1.02 seconds to deallocate network for instance. [ 805.439942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Releasing lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 805.440346] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 805.440543] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 805.441017] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f10b0f8b-6b1e-4295-b883-b18e545a4099 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.450467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3547a443-5689-4910-9856-64eee45012cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.473242] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cc6bdf77-2540-47e3-aed2-cb0c73b329cc could not be found. [ 805.473470] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 805.473652] env[62619]: INFO nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Took 0.03 seconds to destroy the instance on the hypervisor. [ 805.473889] env[62619]: DEBUG oslo.service.loopingcall [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 805.474711] env[62619]: DEBUG nova.compute.manager [-] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 805.474813] env[62619]: DEBUG nova.network.neutron [-] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 805.489496] env[62619]: DEBUG nova.network.neutron [-] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.621634] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.278s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 805.622346] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 805.625753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 25.040s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 805.626490] env[62619]: INFO nova.compute.claims [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.992201] env[62619]: DEBUG nova.network.neutron [-] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 806.131033] env[62619]: DEBUG nova.compute.utils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 806.134413] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 806.134588] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 806.173627] env[62619]: DEBUG nova.policy [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ed10513d68d24cae96b6cd8f5c6928f9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac3ac5b183cc4193a20406d04ffb5374', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 806.476784] env[62619]: INFO nova.scheduler.client.report [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Deleted allocations for instance a2ed3e4d-40c2-46b0-9892-0e9cce7b330b [ 806.494161] env[62619]: INFO nova.compute.manager [-] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Took 1.02 seconds to deallocate network for instance. [ 806.500384] env[62619]: DEBUG nova.compute.claims [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 806.500610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 806.519223] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Successfully created port: e88f3386-ea87-4b22-9c58-774374a20367 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.641232] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 806.847804] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac157fd5-13a3-4508-87b2-3fa2c1b11d11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.855695] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e759fd-736c-4112-beea-15f5d6976e68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.886509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44a2ae9f-3c7b-4166-9353-76b3bce0b75b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.894511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8b7d1a2-0920-4907-9326-8d197551d931 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.907777] env[62619]: DEBUG nova.compute.provider_tree [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 806.986644] env[62619]: DEBUG oslo_concurrency.lockutils [None req-879d2342-748c-4b3e-9606-43a08a91af7d tempest-MigrationsAdminTest-1820001251 tempest-MigrationsAdminTest-1820001251-project-member] Lock "a2ed3e4d-40c2-46b0-9892-0e9cce7b330b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 133.467s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.288565] env[62619]: DEBUG nova.compute.manager [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Received event network-changed-e88f3386-ea87-4b22-9c58-774374a20367 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 807.288834] env[62619]: DEBUG nova.compute.manager [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Refreshing instance network info cache due to event network-changed-e88f3386-ea87-4b22-9c58-774374a20367. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 807.289041] env[62619]: DEBUG oslo_concurrency.lockutils [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] Acquiring lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.289191] env[62619]: DEBUG oslo_concurrency.lockutils [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] Acquired lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 807.289357] env[62619]: DEBUG nova.network.neutron [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Refreshing network info cache for port e88f3386-ea87-4b22-9c58-774374a20367 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 807.413717] env[62619]: DEBUG nova.scheduler.client.report [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 807.492973] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 807.637501] env[62619]: ERROR nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. 
[ 807.637501] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.637501] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 807.637501] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 807.637501] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.637501] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.637501] env[62619]: ERROR nova.compute.manager raise self.value [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 807.637501] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 807.637501] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.637501] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 807.637866] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.637866] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 807.637866] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. 
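Editor's note: every one of these tracebacks passes through oslo_utils.excutils.save_and_reraise_exception() inside _update_ports_for_instance(), which is why the PortBindingFailed raised deep in _update_port() reaches nova.compute.manager unchanged. A hedged sketch of that pattern follows; the helper names (update_port, cleanup_ports) are hypothetical stand-ins for the real Nova internals.

from oslo_utils import excutils

def _update_ports_for_instance(requested_ports, update_port, cleanup_ports):
    """Hypothetical outline of the save-and-reraise pattern in the traceback."""
    created_port_ids = []
    for port in requested_ports:
        try:
            created_port_ids.append(update_port(port))
        except Exception:
            # The context manager runs the cleanup in its body and then
            # re-raises the original exception on exit, so the
            # PortBindingFailed from update_port() propagates unchanged.
            with excutils.save_and_reraise_exception():
                cleanup_ports(created_port_ids)
    return created_port_ids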
[ 807.637866] env[62619]: ERROR nova.compute.manager [ 807.637866] env[62619]: Traceback (most recent call last): [ 807.637866] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 807.637866] env[62619]: listener.cb(fileno) [ 807.637866] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.637866] env[62619]: result = function(*args, **kwargs) [ 807.637866] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 807.637866] env[62619]: return func(*args, **kwargs) [ 807.637866] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.637866] env[62619]: raise e [ 807.637866] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.637866] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 807.637866] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 807.637866] env[62619]: created_port_ids = self._update_ports_for_instance( [ 807.637866] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 807.637866] env[62619]: with excutils.save_and_reraise_exception(): [ 807.637866] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.637866] env[62619]: self.force_reraise() [ 807.637866] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.637866] env[62619]: raise self.value [ 807.637866] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 807.637866] env[62619]: updated_port = self._update_port( [ 807.637866] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.637866] env[62619]: _ensure_no_port_binding_failure(port) [ 807.637866] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 807.637866] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 807.638529] env[62619]: nova.exception.PortBindingFailed: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. [ 807.638529] env[62619]: Removing descriptor: 16 [ 807.651194] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 807.692114] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 807.692384] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 807.692538] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 807.692718] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 807.692859] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 807.692998] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 807.693220] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 807.693376] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 807.693540] env[62619]: DEBUG nova.virt.hardware [None 
req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 807.693700] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 807.693867] env[62619]: DEBUG nova.virt.hardware [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 807.694735] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc10d23b-2c5e-4f0e-9dfa-a43369877c85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.703123] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d377ce2a-fc0e-4c2f-8d2b-9275198d967a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.716562] env[62619]: ERROR nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. 
[ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Traceback (most recent call last): [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] yield resources [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self.driver.spawn(context, instance, image_meta, [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] vm_ref = self.build_virtual_machine(instance, [ 807.716562] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] vif_infos = vmwarevif.get_vif_info(self._session, [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] for vif in network_info: [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] return self._sync_wrapper(fn, *args, **kwargs) [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self.wait() [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self[:] = self._gt.wait() [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] return self._exit_event.wait() [ 807.716897] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 807.716897] env[62619]: ERROR 
nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] current.throw(*self._exc) [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] result = function(*args, **kwargs) [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] return func(*args, **kwargs) [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] raise e [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] nwinfo = self.network_api.allocate_for_instance( [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] created_port_ids = self._update_ports_for_instance( [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] with excutils.save_and_reraise_exception(): [ 807.717254] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self.force_reraise() [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] raise self.value [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] updated_port = self._update_port( [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] _ensure_no_port_binding_failure(port) [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] raise exception.PortBindingFailed(port_id=port['id']) [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] nova.exception.PortBindingFailed: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. [ 807.717577] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] [ 807.717577] env[62619]: INFO nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Terminating instance [ 807.718942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquiring lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 807.810730] env[62619]: DEBUG nova.network.neutron [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 807.909872] env[62619]: DEBUG nova.network.neutron [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.918208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.293s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 807.918731] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 807.921607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.524s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 808.014119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 808.412731] env[62619]: DEBUG oslo_concurrency.lockutils [req-5a616289-4918-4011-9d03-37d796e29182 req-e0234621-6e37-48cc-a3dc-f765f76f7602 service nova] Releasing lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 808.413385] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquired lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 808.413681] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 808.425502] env[62619]: DEBUG nova.compute.utils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 808.430152] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 808.430359] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 808.479985] env[62619]: DEBUG nova.policy [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1951cb4058114ac695f1ebc8980135bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62daf278f23642d3b3210ed2bfa85311', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 808.653948] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716aa0fa-2545-4014-b5e4-ee69bd58bcbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.662631] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18abf8a8-3410-40cf-89bb-53eb21daf7ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.694770] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-648235b8-1dfe-4bab-bad8-9773c80f20af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.703387] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8851acec-2b5b-4593-9884-46f4ae81a6db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 808.716619] env[62619]: DEBUG nova.compute.provider_tree [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 808.807889] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Successfully created port: a3cf79b8-ba17-4716-ba0a-08056b13fe17 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 808.933152] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 808.943711] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 809.069053] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.230114] env[62619]: DEBUG nova.scheduler.client.report [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 809.317497] env[62619]: DEBUG nova.compute.manager [req-09aee876-a0e5-461f-906e-8e41b74cf66e req-00abdbf2-9753-4e58-bf21-17ab28bb4fc2 service nova] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Received event network-vif-deleted-e88f3386-ea87-4b22-9c58-774374a20367 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 809.571921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Releasing lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 809.572529] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 809.572664] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 809.572906] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ecfd044a-1127-4489-a4bb-81d1d7b737dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.585226] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bee076c3-09af-46e5-a1ce-ee93692a5cc8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.611650] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1def15dd-e2ad-470e-bff8-9121df881d46 could not be found. [ 809.611650] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 809.612066] env[62619]: INFO nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Took 0.04 seconds to destroy the instance on the hypervisor. [ 809.612246] env[62619]: DEBUG oslo.service.loopingcall [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 809.612485] env[62619]: DEBUG nova.compute.manager [-] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 809.612579] env[62619]: DEBUG nova.network.neutron [-] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 809.635092] env[62619]: DEBUG nova.network.neutron [-] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 809.735902] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.814s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 809.736842] env[62619]: ERROR nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Traceback (most recent call last): [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self.driver.spawn(context, instance, image_meta, [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self._vmops.spawn(context, instance, image_meta, injected_files, [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] vm_ref = self.build_virtual_machine(instance, [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] vif_infos = vmwarevif.get_vif_info(self._session, [ 809.736842] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] for vif in network_info: [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return self._sync_wrapper(fn, *args, **kwargs) [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self.wait() [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 809.737161] env[62619]: ERROR 
nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self[:] = self._gt.wait() [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return self._exit_event.wait() [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] result = hub.switch() [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 809.737161] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return self.greenlet.switch() [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] result = function(*args, **kwargs) [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] return func(*args, **kwargs) [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] raise e [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] nwinfo = self.network_api.allocate_for_instance( [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] created_port_ids = self._update_ports_for_instance( [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] with excutils.save_and_reraise_exception(): [ 809.737455] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] self.force_reraise() [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 
200, in force_reraise [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] raise self.value [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] updated_port = self._update_port( [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] _ensure_no_port_binding_failure(port) [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] raise exception.PortBindingFailed(port_id=port['id']) [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] nova.exception.PortBindingFailed: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. [ 809.737750] env[62619]: ERROR nova.compute.manager [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] [ 809.738021] env[62619]: DEBUG nova.compute.utils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 809.740192] env[62619]: ERROR nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. 
[ 809.740192] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 809.740192] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 809.740192] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 809.740192] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.740192] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.740192] env[62619]: ERROR nova.compute.manager raise self.value [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 809.740192] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 809.740192] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.740192] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 809.740614] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.740614] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 809.740614] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. 
[ 809.740614] env[62619]: ERROR nova.compute.manager [ 809.740614] env[62619]: Traceback (most recent call last): [ 809.740614] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 809.740614] env[62619]: listener.cb(fileno) [ 809.740614] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 809.740614] env[62619]: result = function(*args, **kwargs) [ 809.740614] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 809.740614] env[62619]: return func(*args, **kwargs) [ 809.740614] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 809.740614] env[62619]: raise e [ 809.740614] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 809.740614] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 809.740614] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 809.740614] env[62619]: created_port_ids = self._update_ports_for_instance( [ 809.740614] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 809.740614] env[62619]: with excutils.save_and_reraise_exception(): [ 809.740614] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 809.740614] env[62619]: self.force_reraise() [ 809.740614] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 809.740614] env[62619]: raise self.value [ 809.740614] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 809.740614] env[62619]: updated_port = self._update_port( [ 809.740614] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 809.740614] env[62619]: _ensure_no_port_binding_failure(port) [ 809.740614] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 809.740614] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 809.741356] env[62619]: nova.exception.PortBindingFailed: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. [ 809.741356] env[62619]: Removing descriptor: 16 [ 809.741356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.643s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 809.749020] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Build of instance 28a37e28-6bca-4647-9cba-345da2f973a2 was re-scheduled: Binding failed for port 996abc5d-953f-4c72-a76c-b2b161d80b85, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 809.749020] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 809.749020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 809.749020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 809.749274] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 809.950385] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 809.976037] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 809.976164] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 809.976313] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 809.976493] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 809.976637] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 809.976780] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 809.976980] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 809.977147] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 809.977307] env[62619]: DEBUG nova.virt.hardware [None 
req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 809.977490] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 809.977674] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 809.978526] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d22ecb2-9ed3-4386-83cb-356b884f1039 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 809.986552] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d32c7d5-ddca-4b79-9a06-55fb73466191 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.000157] env[62619]: ERROR nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. 
[ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Traceback (most recent call last): [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] yield resources [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self.driver.spawn(context, instance, image_meta, [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] vm_ref = self.build_virtual_machine(instance, [ 810.000157] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] for vif in network_info: [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] return self._sync_wrapper(fn, *args, **kwargs) [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self.wait() [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self[:] = self._gt.wait() [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] return self._exit_event.wait() [ 810.000566] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 810.000566] env[62619]: ERROR 
nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] current.throw(*self._exc) [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] result = function(*args, **kwargs) [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] return func(*args, **kwargs) [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] raise e [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] nwinfo = self.network_api.allocate_for_instance( [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] created_port_ids = self._update_ports_for_instance( [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] with excutils.save_and_reraise_exception(): [ 810.000902] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self.force_reraise() [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] raise self.value [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] updated_port = self._update_port( [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] _ensure_no_port_binding_failure(port) [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] raise exception.PortBindingFailed(port_id=port['id']) [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] nova.exception.PortBindingFailed: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. [ 810.001227] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] [ 810.001227] env[62619]: INFO nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Terminating instance [ 810.002475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 810.002635] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 810.002798] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 810.138284] env[62619]: DEBUG nova.network.neutron [-] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.276383] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 810.436085] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.525080] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 810.563794] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed1cfc09-bb4f-4efd-9077-08168155afc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.572023] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07f8f84a-7ebf-454b-ba5e-4c61d2c55102 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.599388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04ad637e-c5fd-4c85-8756-508401316df9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.606918] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe3f0128-1e45-4b17-bfe4-8438d02364a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 810.621986] env[62619]: DEBUG nova.compute.provider_tree [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 810.640118] env[62619]: INFO nova.compute.manager [-] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Took 1.03 seconds to deallocate network for instance. [ 810.642963] env[62619]: DEBUG nova.compute.claims [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 810.643160] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 810.682725] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 810.942173] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-28a37e28-6bca-4647-9cba-345da2f973a2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 810.942173] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Virt driver does not provide unplug_vifs method, so it is not possible 
determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 810.942173] env[62619]: DEBUG nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 810.942173] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 810.960269] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.125230] env[62619]: DEBUG nova.scheduler.client.report [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 811.190986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 811.191043] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 811.191870] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 811.191870] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-838ffc66-0933-409d-b6fa-fe26991130a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.203499] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75476dc8-52ef-4a3f-8cc7-5377581a761c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 811.228837] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb could not be found. [ 811.229104] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 811.229291] env[62619]: INFO nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Took 0.04 seconds to destroy the instance on the hypervisor. [ 811.229603] env[62619]: DEBUG oslo.service.loopingcall [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 811.229847] env[62619]: DEBUG nova.compute.manager [-] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 811.229945] env[62619]: DEBUG nova.network.neutron [-] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 811.252078] env[62619]: DEBUG nova.network.neutron [-] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 811.344577] env[62619]: DEBUG nova.compute.manager [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Received event network-changed-a3cf79b8-ba17-4716-ba0a-08056b13fe17 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 811.344777] env[62619]: DEBUG nova.compute.manager [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Refreshing instance network info cache due to event network-changed-a3cf79b8-ba17-4716-ba0a-08056b13fe17. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 811.345049] env[62619]: DEBUG oslo_concurrency.lockutils [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] Acquiring lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.345748] env[62619]: DEBUG oslo_concurrency.lockutils [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] Acquired lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.346040] env[62619]: DEBUG nova.network.neutron [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Refreshing network info cache for port a3cf79b8-ba17-4716-ba0a-08056b13fe17 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 811.463439] env[62619]: DEBUG nova.network.neutron [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.630065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.889s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 811.630730] env[62619]: ERROR nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. 
[ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Traceback (most recent call last): [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self.driver.spawn(context, instance, image_meta, [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] vm_ref = self.build_virtual_machine(instance, [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] vif_infos = vmwarevif.get_vif_info(self._session, [ 811.630730] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] for vif in network_info: [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return self._sync_wrapper(fn, *args, **kwargs) [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self.wait() [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self[:] = self._gt.wait() [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return self._exit_event.wait() [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] result = hub.switch() [ 811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
811.631030] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return self.greenlet.switch() [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] result = function(*args, **kwargs) [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] return func(*args, **kwargs) [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] raise e [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] nwinfo = self.network_api.allocate_for_instance( [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] created_port_ids = self._update_ports_for_instance( [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] with excutils.save_and_reraise_exception(): [ 811.631295] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] self.force_reraise() [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] raise self.value [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] updated_port = self._update_port( [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] _ensure_no_port_binding_failure(port) [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] raise exception.PortBindingFailed(port_id=port['id']) [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] nova.exception.PortBindingFailed: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. [ 811.631558] env[62619]: ERROR nova.compute.manager [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] [ 811.631783] env[62619]: DEBUG nova.compute.utils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 811.632701] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 24.405s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 811.634106] env[62619]: INFO nova.compute.claims [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 811.640025] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Build of instance 5d7a9c55-b148-4115-8390-66b2501f859a was re-scheduled: Binding failed for port 4ee99cf4-64eb-4671-8b04-cabe02112919, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 811.640025] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 811.640025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquiring lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 811.640025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Acquired lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 811.640435] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 811.754391] env[62619]: DEBUG nova.network.neutron [-] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 811.968677] env[62619]: INFO nova.compute.manager [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 28a37e28-6bca-4647-9cba-345da2f973a2] Took 1.03 seconds to deallocate network for instance. [ 812.011574] env[62619]: DEBUG nova.network.neutron [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.151724] env[62619]: DEBUG nova.network.neutron [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.171242] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.251543] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 812.257026] env[62619]: INFO nova.compute.manager [-] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Took 1.03 seconds to deallocate network for instance. [ 812.264083] env[62619]: DEBUG nova.compute.claims [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 812.264083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 812.654152] env[62619]: DEBUG oslo_concurrency.lockutils [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] Releasing lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.654440] env[62619]: DEBUG nova.compute.manager [req-95ccc933-461c-44fc-9c0a-28eef9d3c2e9 req-e2275525-188a-40f4-adbc-4308a68ed7b7 service nova] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Received event network-vif-deleted-a3cf79b8-ba17-4716-ba0a-08056b13fe17 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 812.753846] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Releasing lock "refresh_cache-5d7a9c55-b148-4115-8390-66b2501f859a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 812.753928] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 812.754109] env[62619]: DEBUG nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 812.754283] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 812.772718] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 812.857143] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26ee9ffb-53f7-4122-b425-9793a5deb2c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.864787] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f305eb9-b9d5-46fa-98f9-d0010e9473a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.896279] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d67a15c2-d74a-4446-8597-23a5f80db2cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.903647] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56e02435-e475-45b2-b855-39d681ead93e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 812.917370] env[62619]: DEBUG nova.compute.provider_tree [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 812.998694] env[62619]: INFO nova.scheduler.client.report [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleted allocations for instance 28a37e28-6bca-4647-9cba-345da2f973a2 [ 813.275655] env[62619]: DEBUG nova.network.neutron [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 813.420277] env[62619]: DEBUG nova.scheduler.client.report [None 
req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 813.506987] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6ad48ae1-53c8-4186-bda1-04674439e644 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "28a37e28-6bca-4647-9cba-345da2f973a2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 126.030s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.778403] env[62619]: INFO nova.compute.manager [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] [instance: 5d7a9c55-b148-4115-8390-66b2501f859a] Took 1.02 seconds to deallocate network for instance. [ 813.925326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.292s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 813.926146] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 813.929122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.831s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 814.011573] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 814.434364] env[62619]: DEBUG nova.compute.utils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 814.439140] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 814.439566] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 814.504069] env[62619]: DEBUG nova.policy [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '1951cb4058114ac695f1ebc8980135bb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '62daf278f23642d3b3210ed2bfa85311', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 814.534421] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 814.651523] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a68ddf5-da2a-4d66-b0a3-188642226f64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.659066] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb63c74-e660-4943-b94c-98f32c6b15fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.690814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54e92253-0a21-473c-bbd2-d3039062f5a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.697866] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5e17f61-277b-4cfa-ad8c-901dfcf02a3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.711621] env[62619]: DEBUG nova.compute.provider_tree [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 
tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 814.739261] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Successfully created port: 2689bc08-3f1a-4b93-9145-d9789fb216cb {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 814.808725] env[62619]: INFO nova.scheduler.client.report [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Deleted allocations for instance 5d7a9c55-b148-4115-8390-66b2501f859a [ 814.943026] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 815.214913] env[62619]: DEBUG nova.scheduler.client.report [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 815.318828] env[62619]: DEBUG oslo_concurrency.lockutils [None req-819aaddf-24f1-4184-b806-5cdf1b45fbc9 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827 tempest-FloatingIPsAssociationNegativeTestJSON-1808336827-project-member] Lock "5d7a9c55-b148-4115-8390-66b2501f859a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 127.179s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.418568] env[62619]: DEBUG nova.compute.manager [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Received event network-changed-2689bc08-3f1a-4b93-9145-d9789fb216cb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 815.418765] env[62619]: DEBUG nova.compute.manager [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Refreshing instance network info cache due to event network-changed-2689bc08-3f1a-4b93-9145-d9789fb216cb. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 815.419460] env[62619]: DEBUG oslo_concurrency.lockutils [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] Acquiring lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.419634] env[62619]: DEBUG oslo_concurrency.lockutils [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] Acquired lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.419841] env[62619]: DEBUG nova.network.neutron [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Refreshing network info cache for port 2689bc08-3f1a-4b93-9145-d9789fb216cb {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 815.611954] env[62619]: ERROR nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. [ 815.611954] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 815.611954] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 815.611954] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 815.611954] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.611954] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.611954] env[62619]: ERROR nova.compute.manager raise self.value [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 815.611954] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 815.611954] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 815.611954] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 815.612592] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 815.612592] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 815.612592] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: 
Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. [ 815.612592] env[62619]: ERROR nova.compute.manager [ 815.612592] env[62619]: Traceback (most recent call last): [ 815.612592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 815.612592] env[62619]: listener.cb(fileno) [ 815.612592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 815.612592] env[62619]: result = function(*args, **kwargs) [ 815.612592] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 815.612592] env[62619]: return func(*args, **kwargs) [ 815.612592] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 815.612592] env[62619]: raise e [ 815.612592] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 815.612592] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 815.612592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 815.612592] env[62619]: created_port_ids = self._update_ports_for_instance( [ 815.612592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 815.612592] env[62619]: with excutils.save_and_reraise_exception(): [ 815.612592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.612592] env[62619]: self.force_reraise() [ 815.612592] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.612592] env[62619]: raise self.value [ 815.612592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 815.612592] env[62619]: updated_port = self._update_port( [ 815.612592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 815.612592] env[62619]: _ensure_no_port_binding_failure(port) [ 815.612592] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 815.612592] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 815.613362] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. [ 815.613362] env[62619]: Removing descriptor: 18 [ 815.722328] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.791s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 815.722328] env[62619]: ERROR nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. 
[ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Traceback (most recent call last): [ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self.driver.spawn(context, instance, image_meta, [ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self._vmops.spawn(context, instance, image_meta, injected_files, [ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 815.722328] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] vm_ref = self.build_virtual_machine(instance, [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] vif_infos = vmwarevif.get_vif_info(self._session, [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] for vif in network_info: [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return self._sync_wrapper(fn, *args, **kwargs) [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self.wait() [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self[:] = self._gt.wait() [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return self._exit_event.wait() [ 815.722606] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] result = hub.switch() [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return self.greenlet.switch() [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] result = function(*args, **kwargs) [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] return func(*args, **kwargs) [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] raise e [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] nwinfo = self.network_api.allocate_for_instance( [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 815.722940] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] created_port_ids = self._update_ports_for_instance( [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] with excutils.save_and_reraise_exception(): [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] self.force_reraise() [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] raise self.value [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] updated_port = self._update_port( [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] _ensure_no_port_binding_failure(port) [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 815.723289] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] raise exception.PortBindingFailed(port_id=port['id']) [ 815.723603] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] nova.exception.PortBindingFailed: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. [ 815.723603] env[62619]: ERROR nova.compute.manager [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] [ 815.723603] env[62619]: DEBUG nova.compute.utils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 815.724337] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 23.887s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 815.728045] env[62619]: INFO nova.compute.claims [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 815.729737] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Build of instance 0ccdb6cd-d895-42f9-83fa-007c69ce77f9 was re-scheduled: Binding failed for port 11ea45c7-21d2-4b92-b4bb-d36ca07e614b, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 815.730560] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 815.731923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquiring lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 815.731923] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Acquired lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 815.731923] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 815.821267] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 815.948220] env[62619]: DEBUG nova.network.neutron [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 815.953594] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 815.976286] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 815.977035] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 815.977035] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 815.977035] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 815.977035] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 815.977298] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 815.977546] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 815.977794] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 815.977966] env[62619]: DEBUG nova.virt.hardware [None 
req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 815.978142] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 815.978309] env[62619]: DEBUG nova.virt.hardware [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 815.979303] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cfbd33-8e50-49b2-8163-99a42071fb2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.987951] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-002f71c1-df8c-4f51-b12c-ee5e91e94980 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 816.005990] env[62619]: ERROR nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. 
[ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Traceback (most recent call last): [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] yield resources [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self.driver.spawn(context, instance, image_meta, [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self._vmops.spawn(context, instance, image_meta, injected_files, [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] vm_ref = self.build_virtual_machine(instance, [ 816.005990] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] vif_infos = vmwarevif.get_vif_info(self._session, [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] for vif in network_info: [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] return self._sync_wrapper(fn, *args, **kwargs) [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self.wait() [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self[:] = self._gt.wait() [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] return self._exit_event.wait() [ 816.006561] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 816.006561] env[62619]: ERROR 
nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] current.throw(*self._exc) [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] result = function(*args, **kwargs) [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] return func(*args, **kwargs) [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] raise e [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] nwinfo = self.network_api.allocate_for_instance( [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] created_port_ids = self._update_ports_for_instance( [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] with excutils.save_and_reraise_exception(): [ 816.006913] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self.force_reraise() [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] raise self.value [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] updated_port = self._update_port( [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] _ensure_no_port_binding_failure(port) [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] raise exception.PortBindingFailed(port_id=port['id']) [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] nova.exception.PortBindingFailed: Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. [ 816.007247] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] [ 816.007247] env[62619]: INFO nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Terminating instance [ 816.008151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 816.224162] env[62619]: DEBUG nova.network.neutron [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.268427] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.349845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.383423] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 816.633817] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 816.634240] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 816.726412] env[62619]: DEBUG oslo_concurrency.lockutils [req-9e0958ae-d521-408b-ad4d-5bd978e53aa8 req-d7e0730b-eba8-4bf2-be8d-a15bae8c2c6f service nova] Releasing lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.726884] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 816.727206] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 816.886967] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Releasing lock "refresh_cache-0ccdb6cd-d895-42f9-83fa-007c69ce77f9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 816.887337] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Virt driver does not 
provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 816.887411] env[62619]: DEBUG nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 816.887601] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 816.906430] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 816.996233] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3c419d3-6d4f-4dd9-8536-0cc3e79a75e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.004688] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-227c1f1b-eea0-48ef-a26f-32cb350a0bde {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.036632] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14d1b7d7-7b0e-4bc6-9240-e3374d8f0a11 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.044173] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6b5f2a-059c-4916-b4f0-014188c58ae7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.057775] env[62619]: DEBUG nova.compute.provider_tree [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 817.265472] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 817.396855] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.413118] env[62619]: DEBUG nova.network.neutron [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 817.485761] env[62619]: DEBUG nova.compute.manager [req-d3e7abf8-aedf-4abf-a0d6-1f12e20b5db9 req-d95848ed-da31-42f4-a042-b3fbd1f6c694 service nova] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Received event network-vif-deleted-2689bc08-3f1a-4b93-9145-d9789fb216cb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 817.560392] env[62619]: DEBUG nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 817.901152] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 817.901612] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 817.901806] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 817.902131] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3e1add0b-54ea-4703-8053-f9f4afa22214 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.913511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96fa18f4-ee1e-414c-886d-4f839415531b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 817.922214] env[62619]: INFO nova.compute.manager [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] [instance: 0ccdb6cd-d895-42f9-83fa-007c69ce77f9] Took 1.03 seconds to deallocate network for instance. [ 817.942725] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 55316613-3507-4386-a7f9-dbcc52f26327 could not be found. [ 817.942978] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 817.943181] env[62619]: INFO nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Took 0.04 seconds to destroy the instance on the hypervisor. [ 817.943432] env[62619]: DEBUG oslo.service.loopingcall [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 817.943677] env[62619]: DEBUG nova.compute.manager [-] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 817.943774] env[62619]: DEBUG nova.network.neutron [-] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 817.966485] env[62619]: DEBUG nova.network.neutron [-] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 818.066301] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 818.066942] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 818.069968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 23.312s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 818.469491] env[62619]: DEBUG nova.network.neutron [-] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 818.580380] env[62619]: DEBUG nova.compute.utils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 818.582039] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 818.582624] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 818.649221] env[62619]: DEBUG nova.policy [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2d072117ea54a699e2f845b79533dc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c036a4734208450691fba9eb7355f122', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 818.839382] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e27c9308-5971-48b6-b51a-d39d3385ccc6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.848527] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01e5988b-7ba4-439a-871d-b01c1ecb2c70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.886974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a05816e-bc5a-468a-a548-dae3adcdea18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.896482] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c24c32-76ba-4b60-a1c1-ba2319f7d5ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 818.911669] env[62619]: DEBUG nova.compute.provider_tree [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 818.962598] env[62619]: INFO nova.scheduler.client.report [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Deleted allocations for instance 0ccdb6cd-d895-42f9-83fa-007c69ce77f9 [ 818.973474] env[62619]: INFO nova.compute.manager [-] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Took 1.03 seconds to deallocate network for instance. 
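Annotation (not part of the captured log): the repeated "Binding failed for port ..." tracebacks above all terminate in the same guard, _ensure_no_port_binding_failure() at nova/network/neutron.py:294 in this checkout. A minimal sketch of that guard is below, assuming the conventional "binding_failed" marker that Neutron sets on the port's binding:vif_type field (in Nova the constant lives in nova.network.model); this is an illustration of the check seen in the tracebacks, not a copy of the source.

    from nova import exception  # assumes a Nova checkout/venv on PYTHONPATH

    def _ensure_no_port_binding_failure(port):
        # Neutron marks a failed binding by returning the port with
        # binding:vif_type == "binding_failed"; Nova converts that into
        # PortBindingFailed, which then propagates up through
        # _update_port() and _update_ports_for_instance() exactly as the
        # tracebacks above show, aborting the spawn and triggering the
        # "Terminating instance" / network deallocation path.
        if port.get('binding:vif_type') == 'binding_failed':
            raise exception.PortBindingFailed(port_id=port['id'])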
[ 818.974868] env[62619]: DEBUG nova.compute.claims [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 818.979021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.085948] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 819.127860] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Successfully created port: 74940506-5781-4531-b29c-9caec5027589 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 819.414159] env[62619]: DEBUG nova.scheduler.client.report [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 819.474343] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9c46b3b-3dc3-4e9a-8dfb-d88d40563b1a tempest-AttachVolumeShelveTestJSON-2072563548 tempest-AttachVolumeShelveTestJSON-2072563548-project-member] Lock "0ccdb6cd-d895-42f9-83fa-007c69ce77f9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 115.598s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.586009] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "ca452ef6-d777-46dd-a313-ae7dd441adca" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.586277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.885178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "59b960b6-aa41-4409-a899-9829388c3ff2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 819.886041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "59b960b6-aa41-4409-a899-9829388c3ff2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.921479] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.852s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 819.922367] env[62619]: ERROR nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. 
[ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Traceback (most recent call last): [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self.driver.spawn(context, instance, image_meta, [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self._vmops.spawn(context, instance, image_meta, injected_files, [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] vm_ref = self.build_virtual_machine(instance, [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] vif_infos = vmwarevif.get_vif_info(self._session, [ 819.922367] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] for vif in network_info: [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] return self._sync_wrapper(fn, *args, **kwargs) [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self.wait() [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self[:] = self._gt.wait() [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] return self._exit_event.wait() [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] current.throw(*self._exc) [ 819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
819.922709] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] result = function(*args, **kwargs) [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] return func(*args, **kwargs) [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] raise e [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] nwinfo = self.network_api.allocate_for_instance( [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] created_port_ids = self._update_ports_for_instance( [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] with excutils.save_and_reraise_exception(): [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] self.force_reraise() [ 819.923136] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] raise self.value [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] updated_port = self._update_port( [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] _ensure_no_port_binding_failure(port) [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] raise exception.PortBindingFailed(port_id=port['id']) [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] nova.exception.PortBindingFailed: Binding failed for 
port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. [ 819.923470] env[62619]: ERROR nova.compute.manager [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] [ 819.923470] env[62619]: DEBUG nova.compute.utils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 819.924129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.423s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 819.931594] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Build of instance 05a77fe4-172d-4e25-9652-f9dc7cc365ba was re-scheduled: Binding failed for port e2771821-b75a-44ca-84c8-ad9751ad108f, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 819.931865] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 819.935025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 819.935025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 819.935025] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 819.977026] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 820.096148] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 820.125320] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 820.125557] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 820.125724] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 820.125902] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 820.126052] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 820.126194] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 820.126385] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) 
{{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 820.126533] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 820.126835] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 820.127029] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 820.127200] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 820.128206] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3acaba78-e035-4dab-afcb-761a751c60ed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.137853] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b596d59-7271-429f-901f-bdfb31f0709c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.296964] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.297162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.462720] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 820.506555] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.533394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "f60e0bec-0811-4e91-bc45-b61874846497" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 820.533633] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 820.539978] env[62619]: DEBUG nova.compute.manager [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Received event network-changed-74940506-5781-4531-b29c-9caec5027589 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 820.540207] env[62619]: DEBUG nova.compute.manager [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Refreshing instance network info cache due to event network-changed-74940506-5781-4531-b29c-9caec5027589. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 820.540426] env[62619]: DEBUG oslo_concurrency.lockutils [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] Acquiring lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 820.540536] env[62619]: DEBUG oslo_concurrency.lockutils [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] Acquired lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 820.540710] env[62619]: DEBUG nova.network.neutron [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Refreshing network info cache for port 74940506-5781-4531-b29c-9caec5027589 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 820.575361] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 820.730880] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58795748-0145-41f4-890a-a786c06bd3a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.738791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8155f3a2-4994-435a-9880-a2ed94400c91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.773131] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb2015d5-c0cd-46c8-8431-559c66117c1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.781917] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea23d33b-af90-4b66-8a39-3b9bf2778aa3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 820.798697] env[62619]: DEBUG nova.compute.provider_tree [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 820.831343] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. 
[ 820.831343] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 820.831343] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 820.831343] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 820.831343] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.831343] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.831343] env[62619]: ERROR nova.compute.manager raise self.value [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 820.831343] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 820.831343] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.831343] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 820.831751] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.831751] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 820.831751] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. 
[ 820.831751] env[62619]: ERROR nova.compute.manager [ 820.831751] env[62619]: Traceback (most recent call last): [ 820.831751] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 820.831751] env[62619]: listener.cb(fileno) [ 820.831751] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.831751] env[62619]: result = function(*args, **kwargs) [ 820.831751] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 820.831751] env[62619]: return func(*args, **kwargs) [ 820.831751] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 820.831751] env[62619]: raise e [ 820.831751] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 820.831751] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 820.831751] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 820.831751] env[62619]: created_port_ids = self._update_ports_for_instance( [ 820.831751] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 820.831751] env[62619]: with excutils.save_and_reraise_exception(): [ 820.831751] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.831751] env[62619]: self.force_reraise() [ 820.831751] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.831751] env[62619]: raise self.value [ 820.831751] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 820.831751] env[62619]: updated_port = self._update_port( [ 820.831751] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.831751] env[62619]: _ensure_no_port_binding_failure(port) [ 820.831751] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.831751] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 820.832384] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. [ 820.832384] env[62619]: Removing descriptor: 18 [ 820.832384] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. 
[ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Traceback (most recent call last): [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] yield resources [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self.driver.spawn(context, instance, image_meta, [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 820.832384] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] vm_ref = self.build_virtual_machine(instance, [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] vif_infos = vmwarevif.get_vif_info(self._session, [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] for vif in network_info: [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return self._sync_wrapper(fn, *args, **kwargs) [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self.wait() [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self[:] = self._gt.wait() [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return self._exit_event.wait() [ 820.832808] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 820.833187] env[62619]: ERROR 
nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] result = hub.switch() [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return self.greenlet.switch() [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] result = function(*args, **kwargs) [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return func(*args, **kwargs) [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] raise e [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] nwinfo = self.network_api.allocate_for_instance( [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 820.833187] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] created_port_ids = self._update_ports_for_instance( [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] with excutils.save_and_reraise_exception(): [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self.force_reraise() [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] raise self.value [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] updated_port = self._update_port( [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 820.833520] 
env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] _ensure_no_port_binding_failure(port) [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 820.833520] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] raise exception.PortBindingFailed(port_id=port['id']) [ 820.833798] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. [ 820.833798] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] [ 820.833798] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Terminating instance [ 820.835438] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.072192] env[62619]: DEBUG nova.network.neutron [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 821.078459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-05a77fe4-172d-4e25-9652-f9dc7cc365ba" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.079044] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 821.079044] env[62619]: DEBUG nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 821.079044] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 821.099261] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 821.202483] env[62619]: DEBUG nova.network.neutron [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.301932] env[62619]: DEBUG nova.scheduler.client.report [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 821.602573] env[62619]: DEBUG nova.network.neutron [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 821.709044] env[62619]: DEBUG oslo_concurrency.lockutils [req-b4cfc848-2afe-40de-abb6-e9a908a266bf req-a03c0421-89c7-4d1a-a668-804e85f9d4b4 service nova] Releasing lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 821.709044] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquired lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.709044] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 821.806322] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.882s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 821.806970] env[62619]: ERROR nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. 
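The "Inventory has not changed for provider" record above carries the resource provider's inventory as a dict. The sketch below shows how such an inventory translates into schedulable capacity using the usual Placement relation capacity = (total - reserved) * allocation_ratio; it is illustrative only, with the dict values copied from the log line.

    # Inventory as reported for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "max_unit": 16,
                      "allocation_ratio": 4.0},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "max_unit": 65530,
                      "allocation_ratio": 1.0},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "max_unit": 149,
                      "allocation_ratio": 1.0},
    }

    for rc, inv in inventory.items():
        # (total - reserved) * allocation_ratio is the allocatable pool;
        # max_unit caps what a single allocation may request.
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        print(f"{rc}: capacity={capacity:.0f}, per-allocation cap={inv['max_unit']}")
    # VCPU: capacity=192, per-allocation cap=16
    # MEMORY_MB: capacity=196078, per-allocation cap=65530
    # DISK_GB: capacity=400, per-allocation cap=149
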
[ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Traceback (most recent call last): [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self.driver.spawn(context, instance, image_meta, [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] vm_ref = self.build_virtual_machine(instance, [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] vif_infos = vmwarevif.get_vif_info(self._session, [ 821.806970] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] for vif in network_info: [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] return self._sync_wrapper(fn, *args, **kwargs) [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self.wait() [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self[:] = self._gt.wait() [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] return self._exit_event.wait() [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] current.throw(*self._exc) [ 821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
821.807340] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] result = function(*args, **kwargs) [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] return func(*args, **kwargs) [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] raise e [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] nwinfo = self.network_api.allocate_for_instance( [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] created_port_ids = self._update_ports_for_instance( [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] with excutils.save_and_reraise_exception(): [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] self.force_reraise() [ 821.807759] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] raise self.value [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] updated_port = self._update_port( [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] _ensure_no_port_binding_failure(port) [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] raise exception.PortBindingFailed(port_id=port['id']) [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] nova.exception.PortBindingFailed: Binding failed for 
port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. [ 821.808272] env[62619]: ERROR nova.compute.manager [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] [ 821.808272] env[62619]: DEBUG nova.compute.utils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 821.809096] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.795s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 821.810722] env[62619]: INFO nova.compute.claims [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 821.813030] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Build of instance cc6bdf77-2540-47e3-aed2-cb0c73b329cc was re-scheduled: Binding failed for port 7eb95b58-08ae-4ef3-a5eb-4733292345ba, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 821.813455] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 821.813998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquiring lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 821.813998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Acquired lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 821.813998] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 822.107705] env[62619]: INFO nova.compute.manager [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 05a77fe4-172d-4e25-9652-f9dc7cc365ba] Took 1.03 seconds to deallocate network for instance. [ 822.227623] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.303418] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.333151] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.415942] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 822.563321] env[62619]: DEBUG nova.compute.manager [req-b695de04-112e-41fd-b96a-cccd1083c60b req-d946a47a-d30d-4b05-9e3e-f49cf70ef0bc service nova] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Received event network-vif-deleted-74940506-5781-4531-b29c-9caec5027589 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 822.807062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Releasing lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.807062] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 822.807212] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 822.807418] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b9b4b375-ba33-4f27-8281-2b231a8b3ac6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.816795] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b6de802-b11c-45b4-bfd3-dfb56067c40a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 822.841530] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 49fd766a-d798-415e-b5eb-4ad4fe7934c0 could not be found. 
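The "Acquiring/Acquired/Releasing lock" records around the refresh_cache-<uuid> and "compute_resources" locks come from oslo.concurrency's named locks, which also log how long each lock was waited for and held. The following is a hedged sketch of the same pattern, assuming oslo.concurrency is installed; the function names and bodies are placeholders, only the lock names mirror the log.

    from oslo_concurrency import lockutils

    # Serialize refreshes of one instance's network info cache, as the
    # per-instance refresh_cache-<uuid> lock in the log does.
    def refresh_network_cache(instance_uuid):
        with lockutils.lock(f"refresh_cache-{instance_uuid}"):
            pass  # placeholder: rebuild the instance_info_cache here

    # Serialize resource claims, as the resource tracker's single
    # "compute_resources" lock does.
    @lockutils.synchronized("compute_resources")
    def instance_claim():
        pass  # placeholder: claim VCPU/MEMORY_MB/DISK_GB for the instance

    refresh_network_cache("49fd766a-d798-415e-b5eb-4ad4fe7934c0")
    instance_claim()
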
[ 822.841759] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 822.841961] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Took 0.03 seconds to destroy the instance on the hypervisor. [ 822.842213] env[62619]: DEBUG oslo.service.loopingcall [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 822.842422] env[62619]: DEBUG nova.compute.manager [-] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.842512] env[62619]: DEBUG nova.network.neutron [-] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 822.857644] env[62619]: DEBUG nova.network.neutron [-] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 822.918759] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Releasing lock "refresh_cache-cc6bdf77-2540-47e3-aed2-cb0c73b329cc" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 822.918991] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 822.919190] env[62619]: DEBUG nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 822.919354] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 822.935688] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 823.043457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa4aa972-0300-4013-ba15-f759d0469476 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.051141] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c07e6fd-dc2b-4bf2-b3e5-3729e7512c03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.080334] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9bb0693-9687-4619-992f-a377ffa9973b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.088017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73fff579-46bc-46b4-bcce-7620d8d894c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 823.100923] env[62619]: DEBUG nova.compute.provider_tree [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 823.133163] env[62619]: INFO nova.scheduler.client.report [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted allocations for instance 05a77fe4-172d-4e25-9652-f9dc7cc365ba [ 823.361946] env[62619]: DEBUG nova.network.neutron [-] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 823.440538] env[62619]: DEBUG nova.network.neutron [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 823.604212] env[62619]: DEBUG nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 823.640642] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e2e38502-7230-4633-8bb8-66858076f26f tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "05a77fe4-172d-4e25-9652-f9dc7cc365ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 113.340s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 823.864262] env[62619]: INFO nova.compute.manager [-] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Took 1.02 seconds to deallocate network for instance. [ 823.866535] env[62619]: DEBUG nova.compute.claims [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 823.866713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 823.943976] env[62619]: INFO nova.compute.manager [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] [instance: cc6bdf77-2540-47e3-aed2-cb0c73b329cc] Took 1.02 seconds to deallocate network for instance. [ 824.109816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.300s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 824.109816] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 824.112861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.469s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.143069] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 824.524221] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "1c1b7717-30a9-40c9-913f-6d65a619b94a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.524468] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 824.616798] env[62619]: DEBUG nova.compute.utils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 824.621489] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 824.621576] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 824.661689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 824.673176] env[62619]: DEBUG nova.policy [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2d072117ea54a699e2f845b79533dc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c036a4734208450691fba9eb7355f122', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 824.835122] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a4ac4b0-7e08-4e7e-afe2-cc42d3f84466 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.842707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e250a01-f0b9-435d-a961-68231b903086 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.873093] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01390872-21ea-4b7b-9864-7a9641418e3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.880222] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1f90640-be32-491b-bed0-2be0f443e0bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 824.893854] env[62619]: DEBUG nova.compute.provider_tree [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 824.921480] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Successfully created port: 276a428a-8efe-4f4f-b7b4-0fa9fe606361 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 824.970036] env[62619]: INFO nova.scheduler.client.report [None 
req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Deleted allocations for instance cc6bdf77-2540-47e3-aed2-cb0c73b329cc [ 825.120440] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 825.398551] env[62619]: DEBUG nova.scheduler.client.report [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 825.480384] env[62619]: DEBUG oslo_concurrency.lockutils [None req-542a01c7-19e7-4d0b-a6ac-cd85b63306c6 tempest-ServersTestFqdnHostnames-1454798807 tempest-ServersTestFqdnHostnames-1454798807-project-member] Lock "cc6bdf77-2540-47e3-aed2-cb0c73b329cc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 105.926s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.578436] env[62619]: DEBUG nova.compute.manager [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Received event network-changed-276a428a-8efe-4f4f-b7b4-0fa9fe606361 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 825.578734] env[62619]: DEBUG nova.compute.manager [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Refreshing instance network info cache due to event network-changed-276a428a-8efe-4f4f-b7b4-0fa9fe606361. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 825.579067] env[62619]: DEBUG oslo_concurrency.lockutils [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] Acquiring lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.579173] env[62619]: DEBUG oslo_concurrency.lockutils [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] Acquired lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.579294] env[62619]: DEBUG nova.network.neutron [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Refreshing network info cache for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 825.737101] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. [ 825.737101] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.737101] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 825.737101] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 825.737101] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.737101] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.737101] env[62619]: ERROR nova.compute.manager raise self.value [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 825.737101] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 825.737101] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.737101] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 825.737560] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.737560] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 825.737560] env[62619]: ERROR nova.compute.manager 
nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. [ 825.737560] env[62619]: ERROR nova.compute.manager [ 825.737560] env[62619]: Traceback (most recent call last): [ 825.737560] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 825.737560] env[62619]: listener.cb(fileno) [ 825.737560] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 825.737560] env[62619]: result = function(*args, **kwargs) [ 825.737560] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 825.737560] env[62619]: return func(*args, **kwargs) [ 825.737560] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 825.737560] env[62619]: raise e [ 825.737560] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.737560] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 825.737560] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 825.737560] env[62619]: created_port_ids = self._update_ports_for_instance( [ 825.737560] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 825.737560] env[62619]: with excutils.save_and_reraise_exception(): [ 825.737560] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.737560] env[62619]: self.force_reraise() [ 825.737560] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.737560] env[62619]: raise self.value [ 825.737560] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 825.737560] env[62619]: updated_port = self._update_port( [ 825.737560] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.737560] env[62619]: _ensure_no_port_binding_failure(port) [ 825.737560] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.737560] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 825.738336] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. [ 825.738336] env[62619]: Removing descriptor: 18 [ 825.903993] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.791s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 825.904679] env[62619]: ERROR nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. 
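Each of these tracebacks passes through excutils.save_and_reraise_exception(): the port rollback runs while the original exception is in flight, and the original error is then re-raised (the force_reraise / raise self.value frames above). Below is a minimal, self-contained re-implementation sketch of that pattern; the real oslo.utils helper additionally supports logging and a reraise=False escape hatch, and rollback_ports is an illustrative stand-in.

    import sys
    from contextlib import contextmanager

    @contextmanager
    def save_and_reraise_exception():
        # Entered from inside an `except` block: remember the in-flight
        # exception, let the with-body do its cleanup, then re-raise it.
        exc = sys.exc_info()[1]
        try:
            yield
        finally:
            if exc is not None:
                raise exc

    def rollback_ports(port_ids):
        print("rolling back ports:", port_ids)

    created_port_ids = ["276a428a-8efe-4f4f-b7b4-0fa9fe606361"]
    try:
        try:
            raise RuntimeError("port update failed")  # stand-in for _update_port()
        except Exception:
            with save_and_reraise_exception():
                rollback_ports(created_port_ids)       # cleanup runs first
    except RuntimeError as exc:
        print("original exception re-raised:", exc)
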
[ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Traceback (most recent call last): [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self.driver.spawn(context, instance, image_meta, [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self._vmops.spawn(context, instance, image_meta, injected_files, [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] vm_ref = self.build_virtual_machine(instance, [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] vif_infos = vmwarevif.get_vif_info(self._session, [ 825.904679] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] for vif in network_info: [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] return self._sync_wrapper(fn, *args, **kwargs) [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self.wait() [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self[:] = self._gt.wait() [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] return self._exit_event.wait() [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] current.throw(*self._exc) [ 825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
825.904974] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] result = function(*args, **kwargs) [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] return func(*args, **kwargs) [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] raise e [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] nwinfo = self.network_api.allocate_for_instance( [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] created_port_ids = self._update_ports_for_instance( [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] with excutils.save_and_reraise_exception(): [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] self.force_reraise() [ 825.905274] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] raise self.value [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] updated_port = self._update_port( [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] _ensure_no_port_binding_failure(port) [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] raise exception.PortBindingFailed(port_id=port['id']) [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] nova.exception.PortBindingFailed: Binding failed for 
port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. [ 825.905613] env[62619]: ERROR nova.compute.manager [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] [ 825.905613] env[62619]: DEBUG nova.compute.utils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 825.906741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.644s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 825.910043] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Build of instance 1def15dd-e2ad-470e-bff8-9121df881d46 was re-scheduled: Binding failed for port e88f3386-ea87-4b22-9c58-774374a20367, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 825.910482] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 825.910706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquiring lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 825.910891] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Acquired lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 825.915141] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 825.984405] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 826.098332] env[62619]: DEBUG nova.network.neutron [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 826.132450] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 826.164363] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 826.164607] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 826.164797] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 826.167703] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 826.167917] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 826.168090] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 826.168305] 
env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 826.168494] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 826.168733] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 826.168955] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 826.169164] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 826.170061] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e2f4bb-0bd3-4daf-9b7f-6674ed0a46b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.173234] env[62619]: DEBUG nova.network.neutron [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.179393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3caa7f5d-ae9b-43f8-b602-87787ee2849a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.199593] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. 
[ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Traceback (most recent call last): [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] yield resources [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self.driver.spawn(context, instance, image_meta, [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self._vmops.spawn(context, instance, image_meta, injected_files, [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] vm_ref = self.build_virtual_machine(instance, [ 826.199593] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] vif_infos = vmwarevif.get_vif_info(self._session, [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] for vif in network_info: [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] return self._sync_wrapper(fn, *args, **kwargs) [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self.wait() [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self[:] = self._gt.wait() [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] return self._exit_event.wait() [ 826.199961] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 826.199961] env[62619]: ERROR 
nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] current.throw(*self._exc) [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] result = function(*args, **kwargs) [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] return func(*args, **kwargs) [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] raise e [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] nwinfo = self.network_api.allocate_for_instance( [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] created_port_ids = self._update_ports_for_instance( [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] with excutils.save_and_reraise_exception(): [ 826.200257] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self.force_reraise() [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] raise self.value [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] updated_port = self._update_port( [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] _ensure_no_port_binding_failure(port) [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 
826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] raise exception.PortBindingFailed(port_id=port['id']) [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. [ 826.200541] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] [ 826.200541] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Terminating instance [ 826.201813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 826.438584] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 826.502823] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 826.514254] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 826.668151] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0e607e4-a022-4e49-baa3-49bca4f09f98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.675487] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe87628b-21f6-479b-9b6d-fea9c17fcf71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.678714] env[62619]: DEBUG oslo_concurrency.lockutils [req-d10fff08-d190-4aff-81c4-77d289cbffdf req-f0155436-e0a8-4c73-a6e4-f067c5e17f44 service nova] Releasing lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 826.679096] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquired lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 826.679284] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 826.707166] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3245fd8e-2e17-4507-9231-af93efcee883 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.715135] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-448117d3-0bda-402b-9203-70f40b750391 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 826.729128] env[62619]: DEBUG nova.compute.provider_tree [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 827.017345] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Releasing lock "refresh_cache-1def15dd-e2ad-470e-bff8-9121df881d46" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.017600] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 827.017788] env[62619]: DEBUG nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 827.017967] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 827.035962] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.208066] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 827.234032] env[62619]: DEBUG nova.scheduler.client.report [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 827.309267] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.541561] env[62619]: DEBUG nova.network.neutron [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 827.608243] env[62619]: DEBUG nova.compute.manager [req-5e919b50-94ac-441e-920a-4dfc9df51c70 req-2f7e6f64-2fc8-4354-b351-b0f2181fb127 service nova] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Received event network-vif-deleted-276a428a-8efe-4f4f-b7b4-0fa9fe606361 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 827.739062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.831s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 827.739195] env[62619]: ERROR nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. 
[ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Traceback (most recent call last): [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self.driver.spawn(context, instance, image_meta, [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self._vmops.spawn(context, instance, image_meta, injected_files, [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] vm_ref = self.build_virtual_machine(instance, [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] vif_infos = vmwarevif.get_vif_info(self._session, [ 827.739195] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] for vif in network_info: [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] return self._sync_wrapper(fn, *args, **kwargs) [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self.wait() [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self[:] = self._gt.wait() [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] return self._exit_event.wait() [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] current.throw(*self._exc) [ 827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
827.739520] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] result = function(*args, **kwargs) [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] return func(*args, **kwargs) [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] raise e [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] nwinfo = self.network_api.allocate_for_instance( [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] created_port_ids = self._update_ports_for_instance( [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] with excutils.save_and_reraise_exception(): [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] self.force_reraise() [ 827.739897] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] raise self.value [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] updated_port = self._update_port( [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] _ensure_no_port_binding_failure(port) [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] raise exception.PortBindingFailed(port_id=port['id']) [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] nova.exception.PortBindingFailed: Binding failed for 
port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. [ 827.740321] env[62619]: ERROR nova.compute.manager [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] [ 827.740321] env[62619]: DEBUG nova.compute.utils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 827.742167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.208s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 827.744131] env[62619]: INFO nova.compute.claims [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 827.747843] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Build of instance 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb was re-scheduled: Binding failed for port a3cf79b8-ba17-4716-ba0a-08056b13fe17, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 827.748490] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 827.748804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 827.749013] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 827.749244] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 827.813717] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Releasing lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 827.813717] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 827.813717] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 827.813717] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c77cda9-838c-4fff-82e1-0489137f89aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.822595] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5356484-fe2f-4005-8fa7-aa07827ab6d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 827.843857] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d7ddbbe2-2bea-4fa1-938c-a344f49f0178 could not be found. [ 827.844093] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 827.844275] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Took 0.03 seconds to destroy the instance on the hypervisor. [ 827.844516] env[62619]: DEBUG oslo.service.loopingcall [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 827.844734] env[62619]: DEBUG nova.compute.manager [-] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 827.844826] env[62619]: DEBUG nova.network.neutron [-] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 827.859366] env[62619]: DEBUG nova.network.neutron [-] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.047140] env[62619]: INFO nova.compute.manager [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] [instance: 1def15dd-e2ad-470e-bff8-9121df881d46] Took 1.03 seconds to deallocate network for instance. 
[ 828.269729] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.361990] env[62619]: DEBUG nova.network.neutron [-] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.364945] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 828.866304] env[62619]: INFO nova.compute.manager [-] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Took 1.02 seconds to deallocate network for instance. [ 828.866941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 828.867169] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 828.867347] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 828.867506] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 828.870878] env[62619]: DEBUG nova.compute.claims [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 828.871071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 828.884739] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 828.978038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d76c134-1c3f-4b17-8796-693e8f3ef69a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 828.985362] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2434681b-a7fc-4321-a73b-2aec18bb7ef9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.014972] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3727ef7c-1679-4fe3-9645-affa8e15bfdd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.021999] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ae4e8a-2d0a-4d4b-ba0b-456697bdda0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 829.034805] env[62619]: DEBUG nova.compute.provider_tree [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 829.075942] env[62619]: INFO nova.scheduler.client.report [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Deleted allocations for instance 1def15dd-e2ad-470e-bff8-9121df881d46 [ 829.388636] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 829.537322] env[62619]: DEBUG nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 829.586230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5dbec5f2-e960-4d4a-bc81-03ef06220291 tempest-ServerActionsTestOtherA-295738534 tempest-ServerActionsTestOtherA-295738534-project-member] Lock "1def15dd-e2ad-470e-bff8-9121df881d46" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.131s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 829.891782] env[62619]: INFO nova.compute.manager [None 
req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb] Took 1.02 seconds to deallocate network for instance. [ 830.043192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.301s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 830.043738] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 830.048697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.699s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 830.050159] env[62619]: INFO nova.compute.claims [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 830.089318] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 830.557126] env[62619]: DEBUG nova.compute.utils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 830.560571] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 830.560892] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 830.618065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 830.632101] env[62619]: DEBUG nova.policy [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c2d072117ea54a699e2f845b79533dc6', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c036a4734208450691fba9eb7355f122', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 830.925476] env[62619]: INFO nova.scheduler.client.report [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Deleted allocations for instance 3ef0d65a-f5f4-4efe-9027-f5a38458e4fb [ 831.059015] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Successfully created port: 993811f2-1949-4f82-a090-188abe4805c8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 831.061547] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 831.336291] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb7b0540-663c-4bb4-b34e-97a1894912d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.345162] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-017721e1-6809-486d-be36-8f7ff2f7c466 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.386281] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ec1daf-b57c-4adf-b92a-f531826a319e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.393665] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a47d8764-385b-43aa-bf7c-6ee466ad5794 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 831.410655] env[62619]: DEBUG nova.compute.provider_tree [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 831.442260] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "3ef0d65a-f5f4-4efe-9027-f5a38458e4fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 95.855s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 831.914318] env[62619]: DEBUG nova.scheduler.client.report [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 831.943510] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 832.077515] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 832.102194] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 832.102440] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 832.102594] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 832.102772] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 832.102917] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 832.103085] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 832.103294] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 832.103536] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 832.103604] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 832.103744] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 832.103909] env[62619]: DEBUG nova.virt.hardware [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 832.104796] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7cfe75-2f6c-4381-9168-c353139ee3a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.112994] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95ff12fb-26cc-4e9f-9257-8f6e37722ae1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 832.240203] env[62619]: DEBUG nova.compute.manager [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Received event network-changed-993811f2-1949-4f82-a090-188abe4805c8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 832.240354] env[62619]: DEBUG nova.compute.manager [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Refreshing instance network info cache due to event network-changed-993811f2-1949-4f82-a090-188abe4805c8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 832.240553] env[62619]: DEBUG oslo_concurrency.lockutils [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] Acquiring lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.240714] env[62619]: DEBUG oslo_concurrency.lockutils [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] Acquired lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 832.240847] env[62619]: DEBUG nova.network.neutron [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Refreshing network info cache for port 993811f2-1949-4f82-a090-188abe4805c8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 832.425962] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.377s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 832.426610] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 832.430248] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.455s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.442511] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. 
[ 832.442511] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.442511] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 832.442511] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 832.442511] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.442511] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.442511] env[62619]: ERROR nova.compute.manager raise self.value [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 832.442511] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 832.442511] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.442511] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 832.442988] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 832.442988] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 832.442988] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. 
[ 832.442988] env[62619]: ERROR nova.compute.manager [ 832.442988] env[62619]: Traceback (most recent call last): [ 832.442988] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 832.442988] env[62619]: listener.cb(fileno) [ 832.442988] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 832.442988] env[62619]: result = function(*args, **kwargs) [ 832.442988] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 832.442988] env[62619]: return func(*args, **kwargs) [ 832.442988] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 832.442988] env[62619]: raise e [ 832.442988] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.442988] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 832.442988] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 832.442988] env[62619]: created_port_ids = self._update_ports_for_instance( [ 832.442988] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 832.442988] env[62619]: with excutils.save_and_reraise_exception(): [ 832.442988] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.442988] env[62619]: self.force_reraise() [ 832.442988] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.442988] env[62619]: raise self.value [ 832.442988] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 832.442988] env[62619]: updated_port = self._update_port( [ 832.442988] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.442988] env[62619]: _ensure_no_port_binding_failure(port) [ 832.442988] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 832.442988] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 832.443794] env[62619]: nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. [ 832.443794] env[62619]: Removing descriptor: 18 [ 832.443794] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. 
[ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Traceback (most recent call last): [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] yield resources [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self.driver.spawn(context, instance, image_meta, [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self._vmops.spawn(context, instance, image_meta, injected_files, [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 832.443794] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] vm_ref = self.build_virtual_machine(instance, [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] vif_infos = vmwarevif.get_vif_info(self._session, [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] for vif in network_info: [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return self._sync_wrapper(fn, *args, **kwargs) [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self.wait() [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self[:] = self._gt.wait() [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return self._exit_event.wait() [ 832.444121] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 832.444466] env[62619]: ERROR 
nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] result = hub.switch() [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return self.greenlet.switch() [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] result = function(*args, **kwargs) [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return func(*args, **kwargs) [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] raise e [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] nwinfo = self.network_api.allocate_for_instance( [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 832.444466] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] created_port_ids = self._update_ports_for_instance( [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] with excutils.save_and_reraise_exception(): [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self.force_reraise() [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] raise self.value [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] updated_port = self._update_port( [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 832.444808] 
env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] _ensure_no_port_binding_failure(port) [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 832.444808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] raise exception.PortBindingFailed(port_id=port['id']) [ 832.445150] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. [ 832.445150] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] [ 832.445150] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Terminating instance [ 832.446015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 832.477915] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.525929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 832.526052] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 832.761644] env[62619]: DEBUG nova.network.neutron [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 832.838800] env[62619]: DEBUG nova.network.neutron [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 832.946384] env[62619]: DEBUG nova.compute.utils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 832.948016] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 832.948200] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 832.998717] env[62619]: DEBUG nova.policy [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1ab4be10d444359a7a3b245ec9b9ea0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c583f4e7b29743aabd3e96f7c53fa04f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 833.192949] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65183876-45a2-419b-b8dc-072027363abf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.200792] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b15a39-9020-4862-b736-03f9021cecb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.231545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1048efc0-73dd-45af-b70d-7ea5cdd0469f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.239170] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78173b8f-0652-4a8f-9d7c-244948f65f15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 833.256317] env[62619]: DEBUG nova.compute.provider_tree [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed in 
ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 833.268103] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Successfully created port: f2af30b8-90ff-4267-8101-47d1f59d2f89 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 833.342363] env[62619]: DEBUG oslo_concurrency.lockutils [req-88a7c817-0748-4309-bd97-7d9a70567853 req-f66fcf26-505a-42fc-b485-790417be9b02 service nova] Releasing lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 833.342741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquired lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 833.342930] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 833.453190] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 833.759623] env[62619]: DEBUG nova.scheduler.client.report [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 833.863788] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 833.967193] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.128476] env[62619]: ERROR nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Instance failed network setup after 1 attempt(s): nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. [ 834.128476] env[62619]: ERROR nova.compute.manager Traceback (most recent call last): [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 834.128476] env[62619]: ERROR nova.compute.manager nwinfo = self.network_api.allocate_for_instance( [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 834.128476] env[62619]: ERROR nova.compute.manager created_port_ids = self._update_ports_for_instance( [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 834.128476] env[62619]: ERROR nova.compute.manager with excutils.save_and_reraise_exception(): [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 834.128476] env[62619]: ERROR nova.compute.manager self.force_reraise() [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 834.128476] env[62619]: ERROR nova.compute.manager raise self.value [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 834.128476] env[62619]: ERROR nova.compute.manager updated_port = self._update_port( [ 834.128476] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 834.128476] env[62619]: ERROR nova.compute.manager _ensure_no_port_binding_failure(port) [ 834.128869] env[62619]: ERROR nova.compute.manager File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 834.128869] env[62619]: ERROR nova.compute.manager raise exception.PortBindingFailed(port_id=port['id']) [ 834.128869] env[62619]: ERROR nova.compute.manager nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. 
[ 834.128869] env[62619]: ERROR nova.compute.manager [ 834.128869] env[62619]: Traceback (most recent call last): [ 834.128869] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/poll.py", line 111, in wait [ 834.128869] env[62619]: listener.cb(fileno) [ 834.128869] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 834.128869] env[62619]: result = function(*args, **kwargs) [ 834.128869] env[62619]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 834.128869] env[62619]: return func(*args, **kwargs) [ 834.128869] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 834.128869] env[62619]: raise e [ 834.128869] env[62619]: File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 834.128869] env[62619]: nwinfo = self.network_api.allocate_for_instance( [ 834.128869] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 834.128869] env[62619]: created_port_ids = self._update_ports_for_instance( [ 834.128869] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 834.128869] env[62619]: with excutils.save_and_reraise_exception(): [ 834.128869] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 834.128869] env[62619]: self.force_reraise() [ 834.128869] env[62619]: File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 834.128869] env[62619]: raise self.value [ 834.128869] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 834.128869] env[62619]: updated_port = self._update_port( [ 834.128869] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 834.128869] env[62619]: _ensure_no_port_binding_failure(port) [ 834.128869] env[62619]: File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 834.128869] env[62619]: raise exception.PortBindingFailed(port_id=port['id']) [ 834.130566] env[62619]: nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. [ 834.130566] env[62619]: Removing descriptor: 18 [ 834.268238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.838s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 834.268910] env[62619]: ERROR nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. 
[ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Traceback (most recent call last): [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self.driver.spawn(context, instance, image_meta, [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self._vmops.spawn(context, instance, image_meta, injected_files, [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] vm_ref = self.build_virtual_machine(instance, [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] vif_infos = vmwarevif.get_vif_info(self._session, [ 834.268910] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] for vif in network_info: [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] return self._sync_wrapper(fn, *args, **kwargs) [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self.wait() [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self[:] = self._gt.wait() [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] return self._exit_event.wait() [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] current.throw(*self._exc) [ 834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 
834.269190] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] result = function(*args, **kwargs) [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] return func(*args, **kwargs) [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] raise e [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] nwinfo = self.network_api.allocate_for_instance( [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] created_port_ids = self._update_ports_for_instance( [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] with excutils.save_and_reraise_exception(): [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] self.force_reraise() [ 834.269476] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] raise self.value [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] updated_port = self._update_port( [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] _ensure_no_port_binding_failure(port) [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] raise exception.PortBindingFailed(port_id=port['id']) [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] nova.exception.PortBindingFailed: Binding failed for 
port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. [ 834.269764] env[62619]: ERROR nova.compute.manager [instance: 55316613-3507-4386-a7f9-dbcc52f26327] [ 834.270348] env[62619]: DEBUG nova.compute.utils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 834.272008] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Build of instance 55316613-3507-4386-a7f9-dbcc52f26327 was re-scheduled: Binding failed for port 2689bc08-3f1a-4b93-9145-d9789fb216cb, please check neutron logs for more information. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 834.272488] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 834.272722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquiring lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.272865] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Acquired lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.273069] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 834.275255] env[62619]: DEBUG nova.compute.manager [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Received event network-vif-deleted-993811f2-1949-4f82-a090-188abe4805c8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.275463] env[62619]: DEBUG nova.compute.manager [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Received event network-changed-f2af30b8-90ff-4267-8101-47d1f59d2f89 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 834.275631] env[62619]: DEBUG nova.compute.manager [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] 
Refreshing instance network info cache due to event network-changed-f2af30b8-90ff-4267-8101-47d1f59d2f89. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 834.275821] env[62619]: DEBUG oslo_concurrency.lockutils [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] Acquiring lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.275954] env[62619]: DEBUG oslo_concurrency.lockutils [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] Acquired lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 834.276118] env[62619]: DEBUG nova.network.neutron [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Refreshing network info cache for port f2af30b8-90ff-4267-8101-47d1f59d2f89 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 834.279595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.771s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 834.279595] env[62619]: INFO nova.compute.claims [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 834.463169] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 834.467986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Releasing lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 834.468408] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 834.468599] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 834.468884] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a4c226df-fb68-4507-9c31-be50735f8a40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.478722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f44243-14bf-4065-9fd0-0293e5669632 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.499713] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 834.499713] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 834.499713] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 834.499930] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 834.499930] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 834.499930] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, 
cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 834.499930] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 834.499930] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 834.500082] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 834.500082] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 834.500082] env[62619]: DEBUG nova.virt.hardware [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 834.500340] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a79bc375-ed77-4a18-9017-56a532b05f53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.506641] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 42e05759-742f-4732-97cb-cab2cfb06996 could not be found. [ 834.506847] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 834.507384] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 834.507384] env[62619]: DEBUG oslo.service.loopingcall [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 834.507923] env[62619]: DEBUG nova.compute.manager [-] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 834.508049] env[62619]: DEBUG nova.network.neutron [-] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 834.512864] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b6f3ff0-12b9-44ad-a0ec-d1e194670d15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 834.526477] env[62619]: ERROR nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance failed to spawn: nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Traceback (most recent call last): [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 2894, in _build_resources [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] yield resources [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self.driver.spawn(context, instance, image_meta, [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] vm_ref = self.build_virtual_machine(instance, [ 834.526477] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] vif_infos = vmwarevif.get_vif_info(self._session, [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 834.527068] 
env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] for vif in network_info: [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] return self._sync_wrapper(fn, *args, **kwargs) [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self.wait() [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self[:] = self._gt.wait() [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] return self._exit_event.wait() [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 834.527068] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] current.throw(*self._exc) [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] result = function(*args, **kwargs) [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] return func(*args, **kwargs) [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] raise e [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] nwinfo = self.network_api.allocate_for_instance( [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] created_port_ids = self._update_ports_for_instance( [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 
30045f41-3396-47cb-833d-b5b434c3671b] with excutils.save_and_reraise_exception(): [ 834.527569] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self.force_reraise() [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] raise self.value [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] updated_port = self._update_port( [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] _ensure_no_port_binding_failure(port) [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] raise exception.PortBindingFailed(port_id=port['id']) [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. [ 834.528180] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] [ 834.528180] env[62619]: INFO nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Terminating instance [ 834.531418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 834.658157] env[62619]: DEBUG nova.network.neutron [-] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.794750] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.818335] env[62619]: DEBUG nova.network.neutron [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 834.873418] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 834.905573] env[62619]: DEBUG nova.network.neutron [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.162780] env[62619]: DEBUG nova.network.neutron [-] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.376377] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Releasing lock "refresh_cache-55316613-3507-4386-a7f9-dbcc52f26327" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.376575] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 835.376750] env[62619]: DEBUG nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 835.376906] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 835.393322] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 835.407616] env[62619]: DEBUG oslo_concurrency.lockutils [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] Releasing lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 835.407863] env[62619]: DEBUG nova.compute.manager [req-b04da5c3-d2ee-452b-9988-3311990e7969 req-5b325c50-d859-4685-b853-afa934a3b82b service nova] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Received event network-vif-deleted-f2af30b8-90ff-4267-8101-47d1f59d2f89 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 835.408223] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 835.408385] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 835.548776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b2cc6c-f680-4efd-9c11-1492e78f29f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.556185] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40e84ae6-f04b-41f7-8124-d3c9d8589903 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.584665] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-699ff035-c9d5-4d73-a003-7076d49b9ef0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.591496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a8968da-cf60-4754-a9b9-82d37c695fd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 835.603949] env[62619]: DEBUG nova.compute.provider_tree [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 835.665062] env[62619]: INFO nova.compute.manager [-] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Took 1.16 seconds to deallocate network for instance. 
[ 835.667253] env[62619]: DEBUG nova.compute.claims [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 835.667462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 835.896174] env[62619]: DEBUG nova.network.neutron [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 835.925876] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 836.015715] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 836.107136] env[62619]: DEBUG nova.scheduler.client.report [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 836.399075] env[62619]: INFO nova.compute.manager [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] [instance: 55316613-3507-4386-a7f9-dbcc52f26327] Took 1.02 seconds to deallocate network for instance. 
[ 836.518045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 836.518700] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 836.518914] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 836.519253] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e70d0b2-5af7-48e0-9b6a-aad6b95baeaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.529292] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ea9ef59-f6a2-435f-9988-5021c85fc3b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 836.552068] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 30045f41-3396-47cb-833d-b5b434c3671b could not be found. [ 836.552298] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 836.552474] env[62619]: INFO nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Took 0.03 seconds to destroy the instance on the hypervisor. [ 836.552711] env[62619]: DEBUG oslo.service.loopingcall [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 836.552925] env[62619]: DEBUG nova.compute.manager [-] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 836.553032] env[62619]: DEBUG nova.network.neutron [-] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 836.567220] env[62619]: DEBUG nova.network.neutron [-] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 836.612124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 836.612637] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 836.614971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 12.748s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 837.069718] env[62619]: DEBUG nova.network.neutron [-] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 837.120060] env[62619]: DEBUG nova.compute.utils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 837.126394] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 837.126517] env[62619]: DEBUG nova.network.neutron [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 837.167572] env[62619]: DEBUG nova.policy [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ce526a1d824fe8b6573fa80adcd53f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33242a5e0a764cf3b8af687fc4302e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 837.329156] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a128186b-590c-4259-b826-4b661d0a5c26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.336565] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b1af84a-23e1-48ea-8f0f-d33b18451e2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.368366] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89525eac-a44f-4028-8832-610796e5211a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.376092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e935bab8-0385-44e0-a1bd-8345ae3f8042 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 837.389498] env[62619]: DEBUG nova.compute.provider_tree [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 837.426036] env[62619]: INFO nova.scheduler.client.report [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Deleted allocations for instance 55316613-3507-4386-a7f9-dbcc52f26327 [ 837.489253] env[62619]: DEBUG nova.network.neutron [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Successfully created port: 4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 837.571643] env[62619]: INFO nova.compute.manager [-] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Took 1.02 seconds to deallocate network for instance. 
[ 837.574056] env[62619]: DEBUG nova.compute.claims [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Aborting claim: {{(pid=62619) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 837.574244] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 837.627324] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 837.892574] env[62619]: DEBUG nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 837.934800] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac9110b4-60c2-4156-99a4-dc499c441830 tempest-MultipleCreateTestJSON-708311840 tempest-MultipleCreateTestJSON-708311840-project-member] Lock "55316613-3507-4386-a7f9-dbcc52f26327" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 102.328s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.397192] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.782s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.397675] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. 
[ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Traceback (most recent call last): [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self.driver.spawn(context, instance, image_meta, [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] vm_ref = self.build_virtual_machine(instance, [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] vif_infos = vmwarevif.get_vif_info(self._session, [ 838.397675] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] for vif in network_info: [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return self._sync_wrapper(fn, *args, **kwargs) [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self.wait() [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self[:] = self._gt.wait() [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return self._exit_event.wait() [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] result = hub.switch() [ 838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 
838.398017] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return self.greenlet.switch() [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] result = function(*args, **kwargs) [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] return func(*args, **kwargs) [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] raise e [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] nwinfo = self.network_api.allocate_for_instance( [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] created_port_ids = self._update_ports_for_instance( [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] with excutils.save_and_reraise_exception(): [ 838.398339] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] self.force_reraise() [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] raise self.value [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] updated_port = self._update_port( [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] _ensure_no_port_binding_failure(port) [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] File "/opt/stack/nova/nova/network/neutron.py", line 294, in 
_ensure_no_port_binding_failure [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] raise exception.PortBindingFailed(port_id=port['id']) [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] nova.exception.PortBindingFailed: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. [ 838.398640] env[62619]: ERROR nova.compute.manager [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] [ 838.398889] env[62619]: DEBUG nova.compute.utils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 838.400667] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.738s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.401549] env[62619]: INFO nova.compute.claims [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 838.404537] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Build of instance 49fd766a-d798-415e-b5eb-4ad4fe7934c0 was re-scheduled: Binding failed for port 74940506-5781-4531-b29c-9caec5027589, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 838.405830] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 838.405830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 838.405830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquired lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 838.406068] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 838.437484] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 838.636706] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 838.716983] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 838.717245] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 838.717400] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 838.717636] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 838.717712] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 838.717854] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 838.718266] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 838.718482] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 838.718694] env[62619]: DEBUG nova.virt.hardware [None 
req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 838.718923] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 838.719651] env[62619]: DEBUG nova.virt.hardware [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 838.720591] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b27c023-271c-4ba7-b2cc-e200c31b57f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.729376] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef72604f-7268-4875-9ffd-c7ef8f933657 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 838.929579] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 838.965149] env[62619]: DEBUG nova.compute.manager [req-0c5a485a-8fa5-47bb-8019-fb68b69903c5 req-31486579-3cee-4ba7-bb74-4cbe26dc5de1 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Received event network-vif-plugged-4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 838.965600] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c5a485a-8fa5-47bb-8019-fb68b69903c5 req-31486579-3cee-4ba7-bb74-4cbe26dc5de1 service nova] Acquiring lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 838.966019] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c5a485a-8fa5-47bb-8019-fb68b69903c5 req-31486579-3cee-4ba7-bb74-4cbe26dc5de1 service nova] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 838.966323] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c5a485a-8fa5-47bb-8019-fb68b69903c5 req-31486579-3cee-4ba7-bb74-4cbe26dc5de1 service nova] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 838.966598] env[62619]: DEBUG nova.compute.manager [req-0c5a485a-8fa5-47bb-8019-fb68b69903c5 req-31486579-3cee-4ba7-bb74-4cbe26dc5de1 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] No waiting events found dispatching network-vif-plugged-4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 838.966888] env[62619]: WARNING nova.compute.manager [req-0c5a485a-8fa5-47bb-8019-fb68b69903c5 req-31486579-3cee-4ba7-bb74-4cbe26dc5de1 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Received unexpected event network-vif-plugged-4503b37c-ef93-4256-bf3f-6045d5857427 for instance with vm_state building and task_state spawning. 
[ 838.968469] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 839.047256] env[62619]: DEBUG nova.network.neutron [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Successfully updated port: 4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 839.084631] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 839.551084] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 839.551383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 839.551383] env[62619]: DEBUG nova.network.neutron [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 839.591522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Releasing lock "refresh_cache-49fd766a-d798-415e-b5eb-4ad4fe7934c0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 839.591522] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 839.591522] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 839.591522] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 839.609083] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 839.646651] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a865c36d-7ac9-4523-ae14-272767a86dfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.654383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52fdf921-9621-4509-9022-7bb63fefa90a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.685341] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d664a67-e232-44f7-b87f-75215a44dc55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.693268] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa62aaf2-933e-46dc-a9a9-0ba16405ee85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 839.708036] env[62619]: DEBUG nova.compute.provider_tree [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 840.087992] env[62619]: DEBUG nova.network.neutron [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 840.111563] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.213218] env[62619]: DEBUG nova.scheduler.client.report [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 840.264722] env[62619]: DEBUG nova.network.neutron [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updating instance_info_cache with network_info: [{"id": "4503b37c-ef93-4256-bf3f-6045d5857427", "address": "fa:16:3e:b2:21:33", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4503b37c-ef", "ovs_interfaceid": "4503b37c-ef93-4256-bf3f-6045d5857427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 840.614836] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 49fd766a-d798-415e-b5eb-4ad4fe7934c0] Took 1.02 seconds to deallocate network for instance. 
[ 840.718133] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 840.718579] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 840.721076] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.218s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 840.723712] env[62619]: INFO nova.compute.claims [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 840.767418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 840.767737] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Instance network_info: |[{"id": "4503b37c-ef93-4256-bf3f-6045d5857427", "address": "fa:16:3e:b2:21:33", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4503b37c-ef", "ovs_interfaceid": "4503b37c-ef93-4256-bf3f-6045d5857427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 840.768501] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None 
req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:b2:21:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57c65f87-60fd-4882-ab30-31db49131b46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4503b37c-ef93-4256-bf3f-6045d5857427', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 840.777438] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating folder: Project (33242a5e0a764cf3b8af687fc4302e8e). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.777926] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dbab48a3-b8f3-417d-9ce6-003f005711a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.790280] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Created folder: Project (33242a5e0a764cf3b8af687fc4302e8e) in parent group-v290436. [ 840.790441] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating folder: Instances. Parent ref: group-v290464. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 840.790683] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f54bf558-24f5-4bfd-abc6-8b4b04fac7cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.799828] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Created folder: Instances in parent group-v290464. [ 840.800084] env[62619]: DEBUG oslo.service.loopingcall [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 840.800262] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 840.800491] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-47129667-6e39-4b0e-8adf-5829a428c434 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 840.818699] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 840.818699] env[62619]: value = "task-1364900" [ 840.818699] env[62619]: _type = "Task" [ 840.818699] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 840.826201] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364900, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 840.988075] env[62619]: DEBUG nova.compute.manager [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Received event network-changed-4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 840.988270] env[62619]: DEBUG nova.compute.manager [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Refreshing instance network info cache due to event network-changed-4503b37c-ef93-4256-bf3f-6045d5857427. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 840.988506] env[62619]: DEBUG oslo_concurrency.lockutils [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] Acquiring lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 840.988620] env[62619]: DEBUG oslo_concurrency.lockutils [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] Acquired lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 840.988798] env[62619]: DEBUG nova.network.neutron [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Refreshing network info cache for port 4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 841.229368] env[62619]: DEBUG nova.compute.utils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 841.233052] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 841.233243] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 841.285397] env[62619]: DEBUG nova.policy [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '4cdd5547cb3a4b5493ef44880000ef13', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '3cd104b68e8640f7a50da22df521f2d7', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 841.328642] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364900, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.459748] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "fa4e9947-5b99-4447-9535-6dbcaba635f8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.460109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "fa4e9947-5b99-4447-9535-6dbcaba635f8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.548890] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Successfully created port: 9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.639784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "da5a8beb-0246-43df-9813-436ddf8598a9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 841.640197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "da5a8beb-0246-43df-9813-436ddf8598a9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 841.652603] env[62619]: INFO nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Deleted allocations for instance 49fd766a-d798-415e-b5eb-4ad4fe7934c0 [ 841.736715] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 841.835088] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364900, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 841.836149] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Successfully created port: b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 841.865041] env[62619]: DEBUG nova.network.neutron [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updated VIF entry in instance network info cache for port 4503b37c-ef93-4256-bf3f-6045d5857427. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 841.865403] env[62619]: DEBUG nova.network.neutron [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updating instance_info_cache with network_info: [{"id": "4503b37c-ef93-4256-bf3f-6045d5857427", "address": "fa:16:3e:b2:21:33", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4503b37c-ef", "ovs_interfaceid": "4503b37c-ef93-4256-bf3f-6045d5857427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 841.981316] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5a949f-cd26-41bc-b53b-5a91c54613eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 841.989323] env[62619]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00be8231-b689-4dac-897b-c706cb378240 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.019042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498907a9-83c3-4054-b8ca-4b4e446cb95a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.025995] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd0bebac-b4fb-4573-9a85-c260992100e8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.040250] env[62619]: DEBUG nova.compute.provider_tree [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 842.163900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "49fd766a-d798-415e-b5eb-4ad4fe7934c0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 90.242s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 842.330428] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364900, 'name': CreateVM_Task, 'duration_secs': 1.285305} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.330629] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 842.337151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.337362] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.337690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 842.337926] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-eecc6998-27eb-4ef0-be1e-71286a3fe9d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.342326] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 842.342326] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528ce978-671c-91a9-a455-df68819cd6ba" [ 842.342326] env[62619]: _type = "Task" [ 842.342326] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.349521] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528ce978-671c-91a9-a455-df68819cd6ba, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 842.368197] env[62619]: DEBUG oslo_concurrency.lockutils [req-e4c21406-5027-476d-8711-3836c15f796c req-d19d257f-2d7f-44ab-9f6b-9e777adcd368 service nova] Releasing lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.542992] env[62619]: DEBUG nova.scheduler.client.report [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 842.666276] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 842.745242] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 842.770688] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 842.770941] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 842.771118] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 842.771295] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 842.771436] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 842.771576] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 842.771777] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 842.771932] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 842.772104] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 
tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 842.772269] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 842.772433] env[62619]: DEBUG nova.virt.hardware [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 842.773278] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34db5ae9-3c65-47d4-bcce-99ac863af5db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.780900] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b49b6a3-5ace-4aa5-89d9-d3daeebc5edc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.852106] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528ce978-671c-91a9-a455-df68819cd6ba, 'name': SearchDatastore_Task, 'duration_secs': 0.010989} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 842.852406] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 842.852640] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 842.852870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 842.853024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 842.853204] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 842.853510] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-aaeea365-ed80-4009-b069-f1ae96cc833e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.862164] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 842.862437] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 842.863131] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a4d6f6e-3ec9-4824-b710-341d7e591c91 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 842.868089] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 842.868089] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52145fd7-835b-1832-b2b8-82edce22329f" [ 842.868089] env[62619]: _type = "Task" [ 842.868089] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 842.875476] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52145fd7-835b-1832-b2b8-82edce22329f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.048321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.048854] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 843.051591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 14.180s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.190881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.268445] env[62619]: DEBUG nova.compute.manager [req-811505a8-4db2-493f-a4d1-785821af4440 req-58fbea44-ca25-4d16-9eed-8aa37a23f44e service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received event network-vif-plugged-9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 843.268664] env[62619]: DEBUG oslo_concurrency.lockutils [req-811505a8-4db2-493f-a4d1-785821af4440 req-58fbea44-ca25-4d16-9eed-8aa37a23f44e service nova] Acquiring lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 843.268868] env[62619]: DEBUG oslo_concurrency.lockutils [req-811505a8-4db2-493f-a4d1-785821af4440 req-58fbea44-ca25-4d16-9eed-8aa37a23f44e service nova] Lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 843.269049] env[62619]: DEBUG oslo_concurrency.lockutils [req-811505a8-4db2-493f-a4d1-785821af4440 req-58fbea44-ca25-4d16-9eed-8aa37a23f44e service nova] Lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 843.269215] env[62619]: DEBUG nova.compute.manager [req-811505a8-4db2-493f-a4d1-785821af4440 req-58fbea44-ca25-4d16-9eed-8aa37a23f44e service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] No waiting events found dispatching network-vif-plugged-9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 843.269374] env[62619]: WARNING nova.compute.manager [req-811505a8-4db2-493f-a4d1-785821af4440 req-58fbea44-ca25-4d16-9eed-8aa37a23f44e service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received unexpected event network-vif-plugged-9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 for instance with vm_state building and task_state spawning. 
[ 843.350717] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Successfully updated port: 9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 843.385951] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52145fd7-835b-1832-b2b8-82edce22329f, 'name': SearchDatastore_Task, 'duration_secs': 0.008691} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.386744] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78e8fee0-5dd7-4893-9b17-bda6fa6b6cb3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.391962] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 843.391962] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521f0898-bee3-bb5c-d61b-f6b950d4098f" [ 843.391962] env[62619]: _type = "Task" [ 843.391962] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.401261] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521f0898-bee3-bb5c-d61b-f6b950d4098f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 843.558050] env[62619]: DEBUG nova.compute.utils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 843.560120] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 843.560300] env[62619]: DEBUG nova.network.neutron [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 843.599484] env[62619]: DEBUG nova.policy [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '020e7ac0d1f2455ca052084290166100', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b806f620846340798e9257fb384a3d31', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 843.745047] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3c96d21-25b2-4f1c-b59d-0ee2a236475e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.753688] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92c95c98-3729-4c14-b23f-0c43a1a18c01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.782504] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cd45509-8a6b-4bff-9cdc-ba37e7f9d104 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.789467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-551fa32f-cd7c-453b-bccf-4533e0911953 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.802435] env[62619]: DEBUG nova.compute.provider_tree [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 843.838529] env[62619]: DEBUG nova.network.neutron [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Successfully created port: 3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 843.902165] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521f0898-bee3-bb5c-d61b-f6b950d4098f, 'name': SearchDatastore_Task, 'duration_secs': 0.008871} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 843.902536] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 843.902722] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 9fd66533-39ff-401d-81ef-f37eaceb3103/9fd66533-39ff-401d-81ef-f37eaceb3103.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 843.902981] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0d88842d-b8de-4c75-90dc-a9f913c30714 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 843.910342] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 843.910342] env[62619]: value = "task-1364901" [ 843.910342] env[62619]: _type = "Task" [ 843.910342] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 843.919399] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364901, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.063113] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 844.309072] env[62619]: DEBUG nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 844.420535] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364901, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.458638} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.420535] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 9fd66533-39ff-401d-81ef-f37eaceb3103/9fd66533-39ff-401d-81ef-f37eaceb3103.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 844.420700] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 844.420874] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-bb948fa8-6d0a-4851-993c-1881d8a283b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.426986] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 844.426986] env[62619]: value = "task-1364902" [ 844.426986] env[62619]: _type = "Task" [ 844.426986] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.435469] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364902, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 844.814361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.763s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 844.814975] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Traceback (most recent call last): [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self.driver.spawn(context, instance, image_meta, [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self._vmops.spawn(context, instance, image_meta, injected_files, [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] vm_ref = self.build_virtual_machine(instance, [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] vif_infos = vmwarevif.get_vif_info(self._session, [ 844.814975] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] for vif in network_info: [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] return self._sync_wrapper(fn, *args, **kwargs) [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self.wait() [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 
844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self[:] = self._gt.wait() [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] return self._exit_event.wait() [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] current.throw(*self._exc) [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 844.815334] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] result = function(*args, **kwargs) [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] return func(*args, **kwargs) [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] raise e [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] nwinfo = self.network_api.allocate_for_instance( [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] created_port_ids = self._update_ports_for_instance( [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] with excutils.save_and_reraise_exception(): [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] self.force_reraise() [ 844.815693] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] raise self.value [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 
1340, in _update_ports_for_instance [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] updated_port = self._update_port( [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] _ensure_no_port_binding_failure(port) [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] raise exception.PortBindingFailed(port_id=port['id']) [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] nova.exception.PortBindingFailed: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. [ 844.816061] env[62619]: ERROR nova.compute.manager [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] [ 844.816061] env[62619]: DEBUG nova.compute.utils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 844.816963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.200s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 844.818475] env[62619]: INFO nova.compute.claims [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 844.821227] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Build of instance d7ddbbe2-2bea-4fa1-938c-a344f49f0178 was re-scheduled: Binding failed for port 276a428a-8efe-4f4f-b7b4-0fa9fe606361, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 844.821629] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 844.821848] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 844.821990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquired lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 844.822159] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 844.937206] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364902, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.060328} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 844.937576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 844.938442] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4ee6527-aed3-475b-949e-8f44ff5f3134 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.960861] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Reconfiguring VM instance instance-00000040 to attach disk [datastore2] 9fd66533-39ff-401d-81ef-f37eaceb3103/9fd66533-39ff-401d-81ef-f37eaceb3103.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 844.961135] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f23de487-817a-4f9a-afb9-560d682b7cdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 844.980391] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 844.980391] env[62619]: value = "task-1364903" [ 844.980391] env[62619]: _type = "Task" [ 844.980391] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 844.987975] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364903, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.073578] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 845.102016] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 845.102272] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 845.102468] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 845.103028] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 845.103028] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 845.103028] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 845.103260] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 845.103260] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 845.103466] env[62619]: DEBUG nova.virt.hardware [None 
req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 845.103569] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 845.103692] env[62619]: DEBUG nova.virt.hardware [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 845.104563] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18a2c043-6896-40b3-afb0-7d3613f71ad7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.112492] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0781e1fb-d585-46f7-ab7e-2efaac3d7538 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.299332] env[62619]: DEBUG nova.compute.manager [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received event network-changed-9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.299526] env[62619]: DEBUG nova.compute.manager [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Refreshing instance network info cache due to event network-changed-9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 845.299777] env[62619]: DEBUG oslo_concurrency.lockutils [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] Acquiring lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.299811] env[62619]: DEBUG oslo_concurrency.lockutils [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] Acquired lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.300550] env[62619]: DEBUG nova.network.neutron [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Refreshing network info cache for port 9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 845.333244] env[62619]: DEBUG nova.network.neutron [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Successfully updated port: 3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.348306] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 845.431567] env[62619]: DEBUG nova.compute.manager [req-38726bd3-6944-407b-963a-46a28f46cea7 req-246214fd-012a-4448-81e0-0fbc87a2bd1d service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received event network-vif-plugged-b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 845.431783] env[62619]: DEBUG oslo_concurrency.lockutils [req-38726bd3-6944-407b-963a-46a28f46cea7 req-246214fd-012a-4448-81e0-0fbc87a2bd1d service nova] Acquiring lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 845.431984] env[62619]: DEBUG oslo_concurrency.lockutils [req-38726bd3-6944-407b-963a-46a28f46cea7 req-246214fd-012a-4448-81e0-0fbc87a2bd1d service nova] Lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 845.432168] env[62619]: DEBUG oslo_concurrency.lockutils [req-38726bd3-6944-407b-963a-46a28f46cea7 req-246214fd-012a-4448-81e0-0fbc87a2bd1d service nova] Lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 845.432358] env[62619]: DEBUG nova.compute.manager [req-38726bd3-6944-407b-963a-46a28f46cea7 req-246214fd-012a-4448-81e0-0fbc87a2bd1d service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] No waiting events found dispatching network-vif-plugged-b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 845.432528] env[62619]: WARNING nova.compute.manager [req-38726bd3-6944-407b-963a-46a28f46cea7 req-246214fd-012a-4448-81e0-0fbc87a2bd1d service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received unexpected event network-vif-plugged-b245d7ba-eb3c-4286-840a-250524a98571 for instance with vm_state building and task_state spawning. [ 845.496300] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364903, 'name': ReconfigVM_Task, 'duration_secs': 0.26602} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 845.499047] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 845.500305] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Reconfigured VM instance instance-00000040 to attach disk [datastore2] 9fd66533-39ff-401d-81ef-f37eaceb3103/9fd66533-39ff-401d-81ef-f37eaceb3103.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 845.503637] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-1580bc9d-2011-42fb-8172-43b333aebca0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 845.508043] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 845.508043] env[62619]: value = "task-1364904" [ 845.508043] env[62619]: _type = "Task" [ 845.508043] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 845.515556] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364904, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 845.588215] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Successfully updated port: b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 845.832399] env[62619]: DEBUG nova.network.neutron [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 845.837545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "refresh_cache-29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 845.837674] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquired lock "refresh_cache-29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 845.837813] env[62619]: DEBUG nova.network.neutron [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 845.907615] env[62619]: DEBUG nova.network.neutron [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.002352] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Releasing lock "refresh_cache-d7ddbbe2-2bea-4fa1-938c-a344f49f0178" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.002795] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 846.002795] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 846.002978] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 846.016474] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364904, 'name': Rename_Task, 'duration_secs': 0.132567} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 846.018777] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 846.019150] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-faf6f696-7fce-449f-a64f-5ea40c3d3a38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.021592] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.025739] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 846.025739] env[62619]: value = "task-1364905" [ 846.025739] env[62619]: _type = "Task" [ 846.025739] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 846.036969] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364905, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.041059] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c1c2d51-7b2b-4912-86e1-cda221e8d797 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.048020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98945fbc-25b5-4c62-85b4-25481b1cdaf1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.078299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0830ad2-2915-462c-9e81-110c24404f5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.085322] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6c7a77-b556-477d-aea8-b7cef283c796 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 846.090741] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 846.098322] env[62619]: DEBUG nova.compute.provider_tree [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 846.369767] env[62619]: DEBUG nova.network.neutron [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 846.410602] env[62619]: DEBUG oslo_concurrency.lockutils [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] Releasing lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 846.410848] env[62619]: DEBUG nova.compute.manager [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Received event network-vif-plugged-3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 846.411053] env[62619]: DEBUG oslo_concurrency.lockutils [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] Acquiring lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 846.412112] env[62619]: DEBUG oslo_concurrency.lockutils [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 846.412112] env[62619]: DEBUG oslo_concurrency.lockutils [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 846.412112] env[62619]: DEBUG nova.compute.manager [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] No waiting events found dispatching network-vif-plugged-3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 846.412112] env[62619]: WARNING nova.compute.manager [req-2b96dffc-3626-4ad3-b8d7-4dcf8e04a662 req-17a41660-6b28-4f9a-be5f-4ae7a28c2831 service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Received unexpected event network-vif-plugged-3f368f58-8e8a-4d73-a8fa-31466fbc92ee for instance with vm_state building and task_state spawning. 
[ 846.412112] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquired lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 846.412323] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 846.523570] env[62619]: DEBUG nova.network.neutron [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Updating instance_info_cache with network_info: [{"id": "3f368f58-8e8a-4d73-a8fa-31466fbc92ee", "address": "fa:16:3e:34:47:0a", "network": {"id": "d56f02e2-30ba-47cc-b4e5-d23064c8c463", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-723575391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b806f620846340798e9257fb384a3d31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f368f58-8e", "ovs_interfaceid": "3f368f58-8e8a-4d73-a8fa-31466fbc92ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.524644] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 846.536502] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364905, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 846.601195] env[62619]: DEBUG nova.scheduler.client.report [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 846.948795] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 847.026493] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Releasing lock "refresh_cache-29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.026822] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Instance network_info: |[{"id": "3f368f58-8e8a-4d73-a8fa-31466fbc92ee", "address": "fa:16:3e:34:47:0a", "network": {"id": "d56f02e2-30ba-47cc-b4e5-d23064c8c463", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-723575391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b806f620846340798e9257fb384a3d31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f368f58-8e", "ovs_interfaceid": "3f368f58-8e8a-4d73-a8fa-31466fbc92ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 847.027304] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: d7ddbbe2-2bea-4fa1-938c-a344f49f0178] Took 1.02 seconds to deallocate network for instance. 
[ 847.032338] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:34:47:0a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c8ee8640-3787-4c27-9581-962ddb2be7e5', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '3f368f58-8e8a-4d73-a8fa-31466fbc92ee', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.039584] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Creating folder: Project (b806f620846340798e9257fb384a3d31). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.043418] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e289717b-2935-4daa-a4b4-abc1274af99d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.050798] env[62619]: DEBUG oslo_vmware.api [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364905, 'name': PowerOnVM_Task, 'duration_secs': 0.995278} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.051034] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 847.051235] env[62619]: INFO nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Took 8.41 seconds to spawn the instance on the hypervisor. [ 847.051446] env[62619]: DEBUG nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 847.052201] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2811d3f-21e1-4248-b210-a2144b525396 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.055692] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Created folder: Project (b806f620846340798e9257fb384a3d31) in parent group-v290436. [ 847.055929] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Creating folder: Instances. Parent ref: group-v290467. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.056537] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-446b7b8e-f98b-43ab-859c-8904bf308e7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.064976] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Created folder: Instances in parent group-v290467. [ 847.065228] env[62619]: DEBUG oslo.service.loopingcall [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.066010] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.066166] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f525f5d5-1c94-44f1-a0b9-39b20c9cb1ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.087580] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.087580] env[62619]: value = "task-1364908" [ 847.087580] env[62619]: _type = "Task" [ 847.087580] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.095591] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364908, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.105560] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.289s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 847.106013] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 847.109132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.631s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 847.110640] env[62619]: INFO nova.compute.claims [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 847.393810] env[62619]: DEBUG nova.network.neutron [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Updating instance_info_cache with network_info: [{"id": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "address": "fa:16:3e:08:97:70", "network": {"id": "3310b65e-5f7a-403d-b9a7-edc5ed11ec70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-42072988", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a2f13f9-f4", "ovs_interfaceid": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b245d7ba-eb3c-4286-840a-250524a98571", "address": "fa:16:3e:0c:95:8e", "network": {"id": "e703c5f5-4850-44f3-843c-48fe531d0660", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1158257932", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb245d7ba-eb", "ovs_interfaceid": "b245d7ba-eb3c-4286-840a-250524a98571", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info 
/opt/stack/nova/nova/network/neutron.py:116}} [ 847.413339] env[62619]: DEBUG nova.compute.manager [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Received event network-changed-3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.413539] env[62619]: DEBUG nova.compute.manager [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Refreshing instance network info cache due to event network-changed-3f368f58-8e8a-4d73-a8fa-31466fbc92ee. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 847.413750] env[62619]: DEBUG oslo_concurrency.lockutils [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] Acquiring lock "refresh_cache-29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.413889] env[62619]: DEBUG oslo_concurrency.lockutils [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] Acquired lock "refresh_cache-29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.414056] env[62619]: DEBUG nova.network.neutron [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Refreshing network info cache for port 3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 847.455390] env[62619]: DEBUG nova.compute.manager [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received event network-changed-b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 847.455584] env[62619]: DEBUG nova.compute.manager [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Refreshing instance network info cache due to event network-changed-b245d7ba-eb3c-4286-840a-250524a98571. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 847.455768] env[62619]: DEBUG oslo_concurrency.lockutils [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] Acquiring lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.571337] env[62619]: INFO nova.compute.manager [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Took 27.09 seconds to build instance. [ 847.597256] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364908, 'name': CreateVM_Task, 'duration_secs': 0.296623} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 847.597476] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 847.598167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 847.598380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.599045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 847.599045] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3ac9c753-62b4-404e-942c-06494aece751 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.603498] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 847.603498] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ca2cbc-803e-4d8c-b70e-31020f239241" [ 847.603498] env[62619]: _type = "Task" [ 847.603498] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.610979] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ca2cbc-803e-4d8c-b70e-31020f239241, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 847.616872] env[62619]: DEBUG nova.compute.utils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 847.618222] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 847.618384] env[62619]: DEBUG nova.network.neutron [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 847.654211] env[62619]: DEBUG nova.policy [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e39de6e249384be79ec9429141464b81', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '944cd46054cf4352a1dff3284bd5a88c', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 847.897075] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Releasing lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 847.897473] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Instance network_info: |[{"id": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "address": "fa:16:3e:08:97:70", "network": {"id": "3310b65e-5f7a-403d-b9a7-edc5ed11ec70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-42072988", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a2f13f9-f4", "ovs_interfaceid": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b245d7ba-eb3c-4286-840a-250524a98571", "address": "fa:16:3e:0c:95:8e", "network": {"id": "e703c5f5-4850-44f3-843c-48fe531d0660", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1158257932", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": 
{"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb245d7ba-eb", "ovs_interfaceid": "b245d7ba-eb3c-4286-840a-250524a98571", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 847.897795] env[62619]: DEBUG oslo_concurrency.lockutils [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] Acquired lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 847.898019] env[62619]: DEBUG nova.network.neutron [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Refreshing network info cache for port b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 847.899402] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:97:70', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4734e5e-2a76-4bda-8905-70c9bf9e007f', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:95:8e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1002b79b-224e-41e3-a484-4245a767147a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b245d7ba-eb3c-4286-840a-250524a98571', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 847.909981] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Creating folder: Project (3cd104b68e8640f7a50da22df521f2d7). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.911017] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-952cc63b-29ba-49ca-bdc0-7ccc3a3dfa3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.922871] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Created folder: Project (3cd104b68e8640f7a50da22df521f2d7) in parent group-v290436. [ 847.923072] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Creating folder: Instances. Parent ref: group-v290470. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 847.923305] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a88e98a5-be39-45e7-9a32-65f44e1ae9a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.932054] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Created folder: Instances in parent group-v290470. [ 847.932278] env[62619]: DEBUG oslo.service.loopingcall [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 847.932510] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 847.932736] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6aa088a9-8cf7-4ca6-b6e1-f3a886931e99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 847.955577] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 847.955577] env[62619]: value = "task-1364911" [ 847.955577] env[62619]: _type = "Task" [ 847.955577] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 847.963062] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364911, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.072367] env[62619]: INFO nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Deleted allocations for instance d7ddbbe2-2bea-4fa1-938c-a344f49f0178 [ 848.077918] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d5ea69d9-a066-48a9-a82e-7cb719add2f1 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 80.574s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.114745] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ca2cbc-803e-4d8c-b70e-31020f239241, 'name': SearchDatastore_Task, 'duration_secs': 0.008505} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.115331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.115331] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.115466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.115684] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.115837] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 848.116066] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f408d764-f4fe-4a62-8fe2-290b0a4223ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.122275] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 848.135415] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 848.135619] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 848.136454] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-14cfd088-7c6a-4da6-9825-4d2bc48c3475 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.142250] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 848.142250] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527df911-93d6-91c0-ecfb-27e0c33810ec" [ 848.142250] env[62619]: _type = "Task" [ 848.142250] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.155362] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527df911-93d6-91c0-ecfb-27e0c33810ec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.249637] env[62619]: DEBUG nova.network.neutron [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Successfully created port: 5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 848.356457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e96b71-5e83-4259-958d-c9865ec27aeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.365785] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d56a49-45ae-40be-9f95-7dad6ed83ea1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.400266] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-696b5b93-2cf0-4da5-92f0-93a7decf01d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.407748] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf581302-58cf-40d6-bcaa-ddfd0768b25f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.424865] env[62619]: DEBUG nova.compute.provider_tree [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 848.465115] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364911, 'name': CreateVM_Task, 'duration_secs': 0.349263} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.465304] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 848.466073] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 848.466246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 848.466568] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 848.466813] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9066e7da-640d-4fba-8005-2e1fa2befeac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.474226] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 848.474226] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525e900a-ef50-c416-a022-c907d11fe33e" [ 848.474226] env[62619]: _type = "Task" [ 848.474226] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.478917] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525e900a-ef50-c416-a022-c907d11fe33e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.508500] env[62619]: DEBUG nova.network.neutron [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Updated VIF entry in instance network info cache for port 3f368f58-8e8a-4d73-a8fa-31466fbc92ee. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 848.508845] env[62619]: DEBUG nova.network.neutron [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Updating instance_info_cache with network_info: [{"id": "3f368f58-8e8a-4d73-a8fa-31466fbc92ee", "address": "fa:16:3e:34:47:0a", "network": {"id": "d56f02e2-30ba-47cc-b4e5-d23064c8c463", "bridge": "br-int", "label": "tempest-ServerMetadataTestJSON-723575391-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b806f620846340798e9257fb384a3d31", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c8ee8640-3787-4c27-9581-962ddb2be7e5", "external-id": "nsx-vlan-transportzone-224", "segmentation_id": 224, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap3f368f58-8e", "ovs_interfaceid": "3f368f58-8e8a-4d73-a8fa-31466fbc92ee", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.584912] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 848.587512] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "d7ddbbe2-2bea-4fa1-938c-a344f49f0178" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 96.648s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 848.641966] env[62619]: INFO nova.virt.block_device [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Booting with volume fb506c39-d341-4d52-9b16-5d2ed093a6cc at /dev/sda [ 848.648043] env[62619]: DEBUG nova.network.neutron [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Updated VIF entry in instance network info cache for port b245d7ba-eb3c-4286-840a-250524a98571. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 848.648043] env[62619]: DEBUG nova.network.neutron [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Updating instance_info_cache with network_info: [{"id": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "address": "fa:16:3e:08:97:70", "network": {"id": "3310b65e-5f7a-403d-b9a7-edc5ed11ec70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-42072988", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a2f13f9-f4", "ovs_interfaceid": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "b245d7ba-eb3c-4286-840a-250524a98571", "address": "fa:16:3e:0c:95:8e", "network": {"id": "e703c5f5-4850-44f3-843c-48fe531d0660", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1158257932", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.68", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1002b79b-224e-41e3-a484-4245a767147a", "external-id": "nsx-vlan-transportzone-353", "segmentation_id": 353, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb245d7ba-eb", "ovs_interfaceid": "b245d7ba-eb3c-4286-840a-250524a98571", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 848.657203] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527df911-93d6-91c0-ecfb-27e0c33810ec, 'name': SearchDatastore_Task, 'duration_secs': 0.009114} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.658276] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5c4dd58e-5441-4b77-9022-1c208d3d0972 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.662835] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 848.662835] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524a1ac8-e9f4-c6ed-8f57-e289c16fcc82" [ 848.662835] env[62619]: _type = "Task" [ 848.662835] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 848.672389] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524a1ac8-e9f4-c6ed-8f57-e289c16fcc82, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 848.691735] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fbc507b5-4c4d-4f8e-8d6b-38c3a937ac3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.700113] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8dad554-55cd-4f3b-be75-75a8b7b9bb19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.722723] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-e34544f8-7ddf-4e30-b902-be836ba6b270 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.730722] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e572c3b0-c899-496b-8a0b-ff4b59af345e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.753668] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3363bd0-9e60-45c7-8872-b4c1067992cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.759833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9d1f8b8-a9d5-4f72-a3bd-17f97f3e895f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 848.773721] env[62619]: DEBUG nova.virt.block_device [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updating existing volume attachment record: d69f8a7c-f5e7-4f3e-a7d6-948fd3dfb7b9 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 848.928475] env[62619]: DEBUG nova.scheduler.client.report [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 
tempest-InstanceActionsV221TestJSON-19957774-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 848.984767] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525e900a-ef50-c416-a022-c907d11fe33e, 'name': SearchDatastore_Task, 'duration_secs': 0.008891} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 848.985050] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 848.985050] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 848.986073] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 849.011878] env[62619]: DEBUG oslo_concurrency.lockutils [req-7e46e095-be92-403e-94c3-ad03c5f78e25 req-a208e16b-8000-4aa3-a444-fd304845e48b service nova] Releasing lock "refresh_cache-29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.090718] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 849.112335] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.150239] env[62619]: DEBUG oslo_concurrency.lockutils [req-37c75677-febc-48f6-9727-95325788311c req-986e34e2-7ff4-4880-9208-854241a24348 service nova] Releasing lock "refresh_cache-61d68c36-5251-4fad-9d3b-125296ae0861" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.173755] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524a1ac8-e9f4-c6ed-8f57-e289c16fcc82, 'name': SearchDatastore_Task, 'duration_secs': 0.010004} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.173988] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 849.174254] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a/29f16f05-fe2f-4c16-ab8c-6fb210bbce8a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 849.174515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 849.174690] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 849.174889] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d098f5d2-e11d-43ef-a02e-587e5372a7ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.176668] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c45e84a3-322f-46fe-ae47-546eb11f9861 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.183542] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 849.183542] env[62619]: value = "task-1364912" [ 849.183542] env[62619]: _type = "Task" [ 849.183542] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.187068] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 849.187235] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 849.188605] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8385386a-de9f-4435-8202-0e16e57ef503 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.193452] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364912, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.196109] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 849.196109] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a8dd62-b129-7249-e2b6-9d17ab15ae3c" [ 849.196109] env[62619]: _type = "Task" [ 849.196109] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.203159] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a8dd62-b129-7249-e2b6-9d17ab15ae3c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.391603] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "9fd66533-39ff-401d-81ef-f37eaceb3103" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.392031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.392304] env[62619]: INFO nova.compute.manager [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Shelving [ 849.433150] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.324s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.433750] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 849.436520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.769s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.617503] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.692892] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364912, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.45639} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.693122] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a/29f16f05-fe2f-4c16-ab8c-6fb210bbce8a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 849.693344] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 849.693598] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6598f98d-958d-48c8-a4e5-085c711dcefc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.700912] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 849.700912] env[62619]: value = "task-1364913" [ 849.700912] env[62619]: _type = "Task" [ 849.700912] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.707422] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a8dd62-b129-7249-e2b6-9d17ab15ae3c, 'name': SearchDatastore_Task, 'duration_secs': 0.007364} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 849.708486] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ae8b26fa-5359-408e-bde0-40f299e96b99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.713979] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364913, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.717565] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 849.717565] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5277bf9c-c9d1-c1ae-8d42-25a2691df8de" [ 849.717565] env[62619]: _type = "Task" [ 849.717565] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.725582] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5277bf9c-c9d1-c1ae-8d42-25a2691df8de, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.899784] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 849.899981] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b54a9b16-2caa-4213-9b73-2eef5a356f9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 849.907271] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 849.907271] env[62619]: value = "task-1364914" [ 849.907271] env[62619]: _type = "Task" [ 849.907271] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 849.916346] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364914, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 849.945555] env[62619]: DEBUG nova.compute.utils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 849.948326] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 849.948326] env[62619]: DEBUG nova.network.neutron [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 849.995581] env[62619]: DEBUG nova.compute.manager [req-f63d9b1c-268e-4cff-b444-8a3dde5783a5 req-b3977f28-e8a2-4ca6-a8c7-c14d282129fb service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Received event network-vif-plugged-5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 849.995581] env[62619]: DEBUG oslo_concurrency.lockutils [req-f63d9b1c-268e-4cff-b444-8a3dde5783a5 req-b3977f28-e8a2-4ca6-a8c7-c14d282129fb service nova] Acquiring lock "c30e0db3-9b63-44b7-9b7f-810defc530d1-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 849.998364] env[62619]: DEBUG oslo_concurrency.lockutils [req-f63d9b1c-268e-4cff-b444-8a3dde5783a5 req-b3977f28-e8a2-4ca6-a8c7-c14d282129fb service nova] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 849.998364] env[62619]: DEBUG oslo_concurrency.lockutils [req-f63d9b1c-268e-4cff-b444-8a3dde5783a5 req-b3977f28-e8a2-4ca6-a8c7-c14d282129fb service nova] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 849.998364] env[62619]: DEBUG nova.compute.manager [req-f63d9b1c-268e-4cff-b444-8a3dde5783a5 req-b3977f28-e8a2-4ca6-a8c7-c14d282129fb service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] No waiting events found dispatching network-vif-plugged-5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 849.998364] env[62619]: WARNING nova.compute.manager [req-f63d9b1c-268e-4cff-b444-8a3dde5783a5 req-b3977f28-e8a2-4ca6-a8c7-c14d282129fb service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Received unexpected event network-vif-plugged-5f9d96e1-ec69-4313-99e3-a91700248c54 for instance with vm_state building and task_state block_device_mapping. 
[ 850.001897] env[62619]: DEBUG nova.policy [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '110e8fd00bdd4ecfbc36a760b3c48a1e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'adbb54682238402b8aaeb002439e38d5', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 850.146711] env[62619]: DEBUG nova.network.neutron [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Successfully updated port: 5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 850.194707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4276424-e3a4-4a92-9d88-8971c62d27a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.205823] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a45b0d1-baa6-45e6-a971-5699a5fd812e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.215253] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364913, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057233} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.240290] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 850.241178] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c20b2b2-01fb-49ee-ad25-343d9be1a520 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.246842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07688a2f-f794-40c9-a321-bf72bf81ad6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.254806] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5277bf9c-c9d1-c1ae-8d42-25a2691df8de, 'name': SearchDatastore_Task, 'duration_secs': 0.008433} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.265551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 850.265832] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 61d68c36-5251-4fad-9d3b-125296ae0861/61d68c36-5251-4fad-9d3b-125296ae0861.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 850.275500] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Reconfiguring VM instance instance-00000042 to attach disk [datastore2] 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a/29f16f05-fe2f-4c16-ab8c-6fb210bbce8a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 850.275500] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f1974bc1-b532-4616-8f22-5512ce17e3d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.278148] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e45af9a3-6b9c-4857-b347-5242a31359dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.281788] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a5e23c2f-0e6d-4e57-99ca-8a55fb3b7e14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.295857] env[62619]: DEBUG nova.network.neutron [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Successfully created port: 9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 850.309950] env[62619]: DEBUG nova.compute.provider_tree [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 850.312942] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 850.312942] env[62619]: value = "task-1364915" [ 850.312942] env[62619]: _type = "Task" [ 850.312942] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.313175] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 850.313175] env[62619]: value = "task-1364916" [ 850.313175] env[62619]: _type = "Task" [ 850.313175] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.325421] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364915, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.328717] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364916, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.421705] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364914, 'name': PowerOffVM_Task, 'duration_secs': 0.185157} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.422049] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 850.423024] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aba4eb9e-8d94-4061-837e-99d0ec5d4964 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.442505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472fd2ad-0513-482e-ae6e-20cb8e74e341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.451224] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 850.649742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 850.649951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquired lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 850.650081] env[62619]: DEBUG nova.network.neutron [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 850.815115] env[62619]: DEBUG nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 850.836397] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364916, 'name': ReconfigVM_Task, 'duration_secs': 0.338531} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.836912] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364915, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.479008} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 850.837076] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Reconfigured VM instance instance-00000042 to attach disk [datastore2] 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a/29f16f05-fe2f-4c16-ab8c-6fb210bbce8a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 850.837804] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 61d68c36-5251-4fad-9d3b-125296ae0861/61d68c36-5251-4fad-9d3b-125296ae0861.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 850.838125] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 850.838422] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-31a738f9-2509-4e70-b1fd-918ce64fe13a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.840966] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-6d4c31c4-c4a3-488e-8a89-6c56a9a55acf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.853373] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 850.853373] env[62619]: value = "task-1364918" [ 850.853373] env[62619]: _type = "Task" [ 850.853373] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.855019] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 850.855019] env[62619]: value = "task-1364917" [ 850.855019] env[62619]: _type = "Task" [ 850.855019] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.865816] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364918, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.869213] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364917, 'name': Rename_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 850.874703] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 850.874703] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 850.874703] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 850.874910] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 850.874910] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 850.875073] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 850.875230] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 850.875438] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Topology 
preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 850.875604] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 850.875779] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 850.875943] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 850.876140] env[62619]: DEBUG nova.virt.hardware [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 850.876923] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26c052a6-582e-41d1-960a-739daa0c765b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.884814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75527fd0-8c25-49d5-a41b-d326df99e8af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.955084] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 850.958861] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-f105afa7-068e-4fc1-90b8-6d8eded1cd8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 850.966393] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 850.966393] env[62619]: value = "task-1364919" [ 850.966393] env[62619]: _type = "Task" [ 850.966393] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 850.975286] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364919, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.184256] env[62619]: DEBUG nova.network.neutron [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.327859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.891s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.328615] env[62619]: ERROR nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Traceback (most recent call last): [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self.driver.spawn(context, instance, image_meta, [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self._vmops.spawn(context, instance, image_meta, injected_files, [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] vm_ref = self.build_virtual_machine(instance, [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] vif_infos = vmwarevif.get_vif_info(self._session, [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] for vif in network_info: [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return self._sync_wrapper(fn, *args, **kwargs) [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 
42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self.wait() [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self[:] = self._gt.wait() [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return self._exit_event.wait() [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 124, in wait [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] result = hub.switch() [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 310, in switch [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return self.greenlet.switch() [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] result = function(*args, **kwargs) [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] return func(*args, **kwargs) [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] raise e [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] nwinfo = self.network_api.allocate_for_instance( [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] created_port_ids = self._update_ports_for_instance( [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] with excutils.save_and_reraise_exception(): [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 
42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 851.328615] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] self.force_reraise() [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] raise self.value [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] updated_port = self._update_port( [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] _ensure_no_port_binding_failure(port) [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] raise exception.PortBindingFailed(port_id=port['id']) [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] nova.exception.PortBindingFailed: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. [ 851.329808] env[62619]: ERROR nova.compute.manager [instance: 42e05759-742f-4732-97cb-cab2cfb06996] [ 851.329808] env[62619]: DEBUG nova.compute.utils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 851.331704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 13.757s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.338870] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Build of instance 42e05759-742f-4732-97cb-cab2cfb06996 was re-scheduled: Binding failed for port 993811f2-1949-4f82-a090-188abe4805c8, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 851.343179] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 851.343447] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquiring lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 851.343653] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Acquired lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 851.343780] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 851.370031] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364918, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070032} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.371710] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 851.372133] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364917, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.372937] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db46361-5e7c-46bf-89b9-c7c75aa0fe5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.401233] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Reconfiguring VM instance instance-00000041 to attach disk [datastore2] 61d68c36-5251-4fad-9d3b-125296ae0861/61d68c36-5251-4fad-9d3b-125296ae0861.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 851.402618] env[62619]: DEBUG nova.network.neutron [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updating instance_info_cache with network_info: [{"id": "5f9d96e1-ec69-4313-99e3-a91700248c54", "address": "fa:16:3e:09:e6:e7", "network": {"id": "10f913fe-f846-48d1-b9eb-8479a1b45319", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-767095649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944cd46054cf4352a1dff3284bd5a88c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f9d96e1-ec", "ovs_interfaceid": "5f9d96e1-ec69-4313-99e3-a91700248c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 851.404219] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e3290532-55ab-4930-ac42-547cf6b8abb0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.427174] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 851.427174] env[62619]: value = "task-1364920" [ 851.427174] env[62619]: _type = "Task" [ 851.427174] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.435645] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364920, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.462939] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 851.475586] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364919, 'name': CreateSnapshot_Task, 'duration_secs': 0.477228} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.475902] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 851.476678] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3531f436-d7a6-43cc-9391-f68f9051442c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.493289] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 851.493512] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 851.493734] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 851.493974] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 
851.494176] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 851.494606] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 851.494606] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 851.494807] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 851.495367] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 851.495367] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 851.495367] env[62619]: DEBUG nova.virt.hardware [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 851.496206] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a0c491f-4890-40e5-8dfe-7b23e1262884 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.504064] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d10447-389c-455b-9758-06792e67c2ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.865558] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 851.870979] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364917, 'name': Rename_Task, 'duration_secs': 0.865435} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 851.871093] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 851.871913] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-66515c6e-264e-4928-a3ac-9a8c71ce46b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.877195] env[62619]: DEBUG nova.compute.manager [req-ad7b2541-28b2-4eb3-815b-c53fa563a349 req-3a30fee8-50d3-4cbc-a8c5-9585dde0f375 service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Received event network-vif-plugged-9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 851.877417] env[62619]: DEBUG oslo_concurrency.lockutils [req-ad7b2541-28b2-4eb3-815b-c53fa563a349 req-3a30fee8-50d3-4cbc-a8c5-9585dde0f375 service nova] Acquiring lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 851.877623] env[62619]: DEBUG oslo_concurrency.lockutils [req-ad7b2541-28b2-4eb3-815b-c53fa563a349 req-3a30fee8-50d3-4cbc-a8c5-9585dde0f375 service nova] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 851.877795] env[62619]: DEBUG oslo_concurrency.lockutils [req-ad7b2541-28b2-4eb3-815b-c53fa563a349 req-3a30fee8-50d3-4cbc-a8c5-9585dde0f375 service nova] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 851.877980] env[62619]: DEBUG nova.compute.manager [req-ad7b2541-28b2-4eb3-815b-c53fa563a349 req-3a30fee8-50d3-4cbc-a8c5-9585dde0f375 service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] No waiting events found dispatching network-vif-plugged-9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 851.878255] env[62619]: WARNING nova.compute.manager [req-ad7b2541-28b2-4eb3-815b-c53fa563a349 req-3a30fee8-50d3-4cbc-a8c5-9585dde0f375 service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Received unexpected event network-vif-plugged-9e4efb80-3307-4c94-9bac-f76ba96bc57d for instance with vm_state building and task_state spawning. 
[ 851.879942] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 851.879942] env[62619]: value = "task-1364921" [ 851.879942] env[62619]: _type = "Task" [ 851.879942] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.888111] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364921, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.920927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Releasing lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 851.921178] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance network_info: |[{"id": "5f9d96e1-ec69-4313-99e3-a91700248c54", "address": "fa:16:3e:09:e6:e7", "network": {"id": "10f913fe-f846-48d1-b9eb-8479a1b45319", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-767095649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944cd46054cf4352a1dff3284bd5a88c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f9d96e1-ec", "ovs_interfaceid": "5f9d96e1-ec69-4313-99e3-a91700248c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 851.921627] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:09:e6:e7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '3f9ffee1-f413-4f28-8bc4-3fb2cf299789', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f9d96e1-ec69-4313-99e3-a91700248c54', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 851.929158] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb 
tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Creating folder: Project (944cd46054cf4352a1dff3284bd5a88c). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.936219] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-40c421ba-c2fa-4fc0-9581-3723929d0f3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.947958] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364920, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 851.952473] env[62619]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 851.952703] env[62619]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=62619) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 851.953091] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Folder already exists: Project (944cd46054cf4352a1dff3284bd5a88c). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1609}} [ 851.953332] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Creating folder: Instances. Parent ref: group-v290459. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 851.953604] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ee8eeff7-3522-4104-9f9a-8bcc72a868e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.963016] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Created folder: Instances in parent group-v290459. [ 851.963256] env[62619]: DEBUG oslo.service.loopingcall [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 851.963451] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 851.963653] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-67b2d711-f168-44b3-8674-6daf5eb50acc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 851.978144] env[62619]: DEBUG nova.network.neutron [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Successfully updated port: 9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 851.986626] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 851.986626] env[62619]: value = "task-1364924" [ 851.986626] env[62619]: _type = "Task" [ 851.986626] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 851.999085] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 852.004802] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-59cc2d20-845f-463f-b625-1cb1973bcc2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.009941] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364924, 'name': CreateVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.014115] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 852.014115] env[62619]: value = "task-1364925" [ 852.014115] env[62619]: _type = "Task" [ 852.014115] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.022325] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.028306] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364925, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.030816] env[62619]: DEBUG nova.compute.manager [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Received event network-changed-5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 852.031057] env[62619]: DEBUG nova.compute.manager [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Refreshing instance network info cache due to event network-changed-5f9d96e1-ec69-4313-99e3-a91700248c54. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 852.031321] env[62619]: DEBUG oslo_concurrency.lockutils [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] Acquiring lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.031468] env[62619]: DEBUG oslo_concurrency.lockutils [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] Acquired lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.031626] env[62619]: DEBUG nova.network.neutron [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Refreshing network info cache for port 5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 852.105073] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-baaf5b0f-ecb6-4e12-b48d-73f2ebf58ab8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.112724] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b04583da-262f-4da1-9283-ab63c213d8e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.145998] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-886774d0-233f-484a-88bb-8ec3cd042789 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.152930] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1048ff6a-1344-44ec-bbf8-ea66e59a30b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.167094] env[62619]: DEBUG nova.compute.provider_tree [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 852.391067] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364921, 
'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.446849] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364920, 'name': ReconfigVM_Task, 'duration_secs': 0.633901} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.447224] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Reconfigured VM instance instance-00000041 to attach disk [datastore2] 61d68c36-5251-4fad-9d3b-125296ae0861/61d68c36-5251-4fad-9d3b-125296ae0861.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 852.447888] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-67343b39-eba3-4293-931c-d26d719f8982 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.454877] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 852.454877] env[62619]: value = "task-1364926" [ 852.454877] env[62619]: _type = "Task" [ 852.454877] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.463900] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364926, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.482826] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "refresh_cache-cfa74201-783f-4ef4-8860-e2f53e4dfb81" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 852.483076] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquired lock "refresh_cache-cfa74201-783f-4ef4-8860-e2f53e4dfb81" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 852.483275] env[62619]: DEBUG nova.network.neutron [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 852.497549] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364924, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.524464] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364925, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.529515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Releasing lock "refresh_cache-42e05759-742f-4732-97cb-cab2cfb06996" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 852.529768] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 852.529957] env[62619]: DEBUG nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 852.530162] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 852.545906] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 852.670671] env[62619]: DEBUG nova.scheduler.client.report [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 852.751105] env[62619]: DEBUG nova.network.neutron [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updated VIF entry in instance network info cache for port 5f9d96e1-ec69-4313-99e3-a91700248c54. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 852.751511] env[62619]: DEBUG nova.network.neutron [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updating instance_info_cache with network_info: [{"id": "5f9d96e1-ec69-4313-99e3-a91700248c54", "address": "fa:16:3e:09:e6:e7", "network": {"id": "10f913fe-f846-48d1-b9eb-8479a1b45319", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-767095649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944cd46054cf4352a1dff3284bd5a88c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f9d96e1-ec", "ovs_interfaceid": "5f9d96e1-ec69-4313-99e3-a91700248c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 852.891648] env[62619]: DEBUG oslo_vmware.api [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364921, 'name': PowerOnVM_Task, 'duration_secs': 0.624353} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.892351] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 852.892523] env[62619]: INFO nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Took 7.82 seconds to spawn the instance on the hypervisor. 
[ 852.892696] env[62619]: DEBUG nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 852.893486] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7691f670-e120-48ef-9830-d6d91527a7d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.965688] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364926, 'name': Rename_Task, 'duration_secs': 0.159715} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.966194] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 852.966574] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-81f53209-7836-410e-8239-9aba300911dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 852.973605] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 852.973605] env[62619]: value = "task-1364927" [ 852.973605] env[62619]: _type = "Task" [ 852.973605] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 852.980956] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364927, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 852.996141] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364924, 'name': CreateVM_Task, 'duration_secs': 0.674495} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 852.996310] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 852.996945] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290463', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'name': 'volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c30e0db3-9b63-44b7-9b7f-810defc530d1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'serial': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc'}, 'device_type': None, 'mount_device': '/dev/sda', 'attachment_id': 'd69f8a7c-f5e7-4f3e-a7d6-948fd3dfb7b9', 'boot_index': 0, 'guest_format': None, 'delete_on_termination': True, 'disk_bus': None, 'volume_type': None}], 'swap': None} {{(pid=62619) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 852.997169] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Root volume attach. Driver type: vmdk {{(pid=62619) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 852.997905] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5598214-a33f-4af2-88cd-db05ddcb0825 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.005765] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1aff0f36-9c9a-4296-865c-2d719cd925b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.011952] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53f23bd2-2f30-4c6d-9cb4-f9d0bef6192c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.018113] env[62619]: DEBUG nova.network.neutron [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.022214] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-d2fa041f-c695-41ca-919d-c3cf67940a01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.027274] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364925, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.032592] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 853.032592] env[62619]: value = "task-1364928" [ 853.032592] env[62619]: _type = "Task" [ 853.032592] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.042330] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.048810] env[62619]: DEBUG nova.network.neutron [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.161775] env[62619]: DEBUG nova.network.neutron [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Updating instance_info_cache with network_info: [{"id": "9e4efb80-3307-4c94-9bac-f76ba96bc57d", "address": "fa:16:3e:9b:a6:eb", "network": {"id": "fe89a09f-116b-4a5c-8b15-00f2ea5a8d6c", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1499812187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adbb54682238402b8aaeb002439e38d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e4efb80-33", "ovs_interfaceid": "9e4efb80-3307-4c94-9bac-f76ba96bc57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.177608] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 1.846s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.178300] env[62619]: ERROR nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Failed to build and run instance: nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Traceback (most recent call last): [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 2641, in _build_and_run_instance [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self.driver.spawn(context, instance, image_meta, [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 540, in spawn [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 753, in spawn [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] vm_ref = self.build_virtual_machine(instance, [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 275, in build_virtual_machine [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] vif_infos = vmwarevif.get_vif_info(self._session, [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/virt/vmwareapi/vif.py", line 119, in get_vif_info [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] for vif in network_info: [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/model.py", line 612, in __iter__ [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] return self._sync_wrapper(fn, *args, **kwargs) [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/model.py", line 603, in _sync_wrapper [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self.wait() [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/model.py", line 635, in wait [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self[:] = self._gt.wait() [ 
853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 225, in wait [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] return self._exit_event.wait() [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 131, in wait [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] current.throw(*self._exc) [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 265, in main [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] result = function(*args, **kwargs) [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] return func(*args, **kwargs) [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 2011, in _allocate_network_async [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] raise e [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/compute/manager.py", line 1989, in _allocate_network_async [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] nwinfo = self.network_api.allocate_for_instance( [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 1229, in allocate_for_instance [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] created_port_ids = self._update_ports_for_instance( [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 1365, in _update_ports_for_instance [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] with excutils.save_and_reraise_exception(): [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] self.force_reraise() [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 853.178300] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] raise self.value [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 1340, in _update_ports_for_instance [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 
30045f41-3396-47cb-833d-b5b434c3671b] updated_port = self._update_port( [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 585, in _update_port [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] _ensure_no_port_binding_failure(port) [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] File "/opt/stack/nova/nova/network/neutron.py", line 294, in _ensure_no_port_binding_failure [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] raise exception.PortBindingFailed(port_id=port['id']) [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] nova.exception.PortBindingFailed: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. [ 853.179375] env[62619]: ERROR nova.compute.manager [instance: 30045f41-3396-47cb-833d-b5b434c3671b] [ 853.179375] env[62619]: DEBUG nova.compute.utils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 853.181054] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.212s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 853.182085] env[62619]: INFO nova.compute.claims [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 853.184962] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Build of instance 30045f41-3396-47cb-833d-b5b434c3671b was re-scheduled: Binding failed for port f2af30b8-90ff-4267-8101-47d1f59d2f89, please check neutron logs for more information. 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2480}} [ 853.185513] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Unplugging VIFs for instance {{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3006}} [ 853.185771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.185950] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.186176] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 853.253971] env[62619]: DEBUG oslo_concurrency.lockutils [req-d9b37060-e1ef-45bd-97c7-794b07eecaa8 req-385f17fa-ff14-4a23-a79b-970bf1cb03b5 service nova] Releasing lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.412719] env[62619]: INFO nova.compute.manager [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Took 26.92 seconds to build instance. [ 853.484777] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364927, 'name': PowerOnVM_Task} progress is 1%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.526386] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364925, 'name': CloneVM_Task, 'duration_secs': 1.180669} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 853.526718] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Created linked-clone VM from snapshot [ 853.527534] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030e1b6b-16a8-45f1-9adf-8c572ada9840 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.539664] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Uploading image ac757367-2c71-4040-8956-e58c49d088b3 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 853.547420] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 38%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.551174] env[62619]: INFO nova.compute.manager [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] [instance: 42e05759-742f-4732-97cb-cab2cfb06996] Took 1.02 seconds to deallocate network for instance. [ 853.567709] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 853.567709] env[62619]: value = "vm-290476" [ 853.567709] env[62619]: _type = "VirtualMachine" [ 853.567709] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 853.568108] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-78b211d4-cc1b-4b7e-83a7-b7a36a1d972e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.577037] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lease: (returnval){ [ 853.577037] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52906c56-feb3-5f9b-a62a-994bcf60d1ed" [ 853.577037] env[62619]: _type = "HttpNfcLease" [ 853.577037] env[62619]: } obtained for exporting VM: (result){ [ 853.577037] env[62619]: value = "vm-290476" [ 853.577037] env[62619]: _type = "VirtualMachine" [ 853.577037] env[62619]: }. 
{{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 853.577037] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the lease: (returnval){ [ 853.577037] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52906c56-feb3-5f9b-a62a-994bcf60d1ed" [ 853.577037] env[62619]: _type = "HttpNfcLease" [ 853.577037] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 853.583738] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 853.583738] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52906c56-feb3-5f9b-a62a-994bcf60d1ed" [ 853.583738] env[62619]: _type = "HttpNfcLease" [ 853.583738] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 853.664899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Releasing lock "refresh_cache-cfa74201-783f-4ef4-8860-e2f53e4dfb81" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 853.665260] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Instance network_info: |[{"id": "9e4efb80-3307-4c94-9bac-f76ba96bc57d", "address": "fa:16:3e:9b:a6:eb", "network": {"id": "fe89a09f-116b-4a5c-8b15-00f2ea5a8d6c", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1499812187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adbb54682238402b8aaeb002439e38d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e4efb80-33", "ovs_interfaceid": "9e4efb80-3307-4c94-9bac-f76ba96bc57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 853.665741] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9b:a6:eb', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'dad4f433-bb0b-45c7-8040-972ef2277f75', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9e4efb80-3307-4c94-9bac-f76ba96bc57d', 'vif_model': 
'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 853.674098] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Creating folder: Project (adbb54682238402b8aaeb002439e38d5). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.674439] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b985b6e-c8c9-462a-9d6e-2b9afe2bb9ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.684876] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Created folder: Project (adbb54682238402b8aaeb002439e38d5) in parent group-v290436. [ 853.685229] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Creating folder: Instances. Parent ref: group-v290477. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 853.685515] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e15d991c-b39b-426c-9992-b42d62f8e8cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.696246] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Created folder: Instances in parent group-v290477. [ 853.696246] env[62619]: DEBUG oslo.service.loopingcall [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 853.696471] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 853.696508] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fa49465b-43aa-43a9-81bd-7ec734405991 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.712633] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 853.720620] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 853.720620] env[62619]: value = "task-1364932" [ 853.720620] env[62619]: _type = "Task" [ 853.720620] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.729726] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364932, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.800519] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 853.905301] env[62619]: DEBUG nova.compute.manager [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Received event network-changed-9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 853.905412] env[62619]: DEBUG nova.compute.manager [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Refreshing instance network info cache due to event network-changed-9e4efb80-3307-4c94-9bac-f76ba96bc57d. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 853.905639] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] Acquiring lock "refresh_cache-cfa74201-783f-4ef4-8860-e2f53e4dfb81" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 853.905749] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] Acquired lock "refresh_cache-cfa74201-783f-4ef4-8860-e2f53e4dfb81" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 853.906396] env[62619]: DEBUG nova.network.neutron [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Refreshing network info cache for port 9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 853.914545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6dc5103a-1830-444c-8c4b-be0feae49826 tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 78.780s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 853.985064] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364927, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.045510] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 53%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.086134] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 854.086134] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52906c56-feb3-5f9b-a62a-994bcf60d1ed" [ 854.086134] env[62619]: _type = "HttpNfcLease" [ 854.086134] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 854.086134] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 854.086134] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52906c56-feb3-5f9b-a62a-994bcf60d1ed" [ 854.086134] env[62619]: _type = "HttpNfcLease" [ 854.086134] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 854.086134] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1212b82f-c46d-423e-b379-be1f54f94049 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.094537] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233cc65-504e-7eb3-b497-eb48b8588754/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 854.094738] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233cc65-504e-7eb3-b497-eb48b8588754/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 854.232633] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364932, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.303745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-30045f41-3396-47cb-833d-b5b434c3671b" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 854.304107] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=62619) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3029}} [ 854.304362] env[62619]: DEBUG nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 854.304590] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 854.323476] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 854.368688] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-fdea3d36-eb78-435f-a064-1aea2b249ca1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.417798] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 854.448461] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb8671e-07b1-49c1-867b-381e5d429cec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.459474] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-921940a4-e77d-476c-8498-790d31acc119 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.498130] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dba31a2c-6a31-483c-8258-1f26d77c4dbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.507244] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364927, 'name': PowerOnVM_Task} progress is 68%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.513937] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-483651ba-2890-40fd-ac22-6c2b1a389466 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.532043] env[62619]: DEBUG nova.compute.provider_tree [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 854.547643] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 65%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.590771] env[62619]: INFO nova.scheduler.client.report [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Deleted allocations for instance 42e05759-742f-4732-97cb-cab2cfb06996 [ 854.693440] env[62619]: DEBUG nova.network.neutron [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Updated VIF entry in instance network info cache for port 9e4efb80-3307-4c94-9bac-f76ba96bc57d. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 854.693915] env[62619]: DEBUG nova.network.neutron [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Updating instance_info_cache with network_info: [{"id": "9e4efb80-3307-4c94-9bac-f76ba96bc57d", "address": "fa:16:3e:9b:a6:eb", "network": {"id": "fe89a09f-116b-4a5c-8b15-00f2ea5a8d6c", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1499812187-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "adbb54682238402b8aaeb002439e38d5", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "dad4f433-bb0b-45c7-8040-972ef2277f75", "external-id": "nsx-vlan-transportzone-451", "segmentation_id": 451, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9e4efb80-33", "ovs_interfaceid": "9e4efb80-3307-4c94-9bac-f76ba96bc57d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.734648] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364932, 'name': CreateVM_Task, 'duration_secs': 0.690942} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.734868] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 854.735710] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 854.735941] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 854.736758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 854.736758] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-18e67a3e-a1bb-431d-9a9e-b12da759479f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.744988] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 854.744988] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527f730d-a737-ed9a-2699-10c31b344208" [ 854.744988] env[62619]: _type = "Task" [ 854.744988] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 854.756085] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527f730d-a737-ed9a-2699-10c31b344208, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.828615] env[62619]: DEBUG nova.network.neutron [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 854.939161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.005917] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364927, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.037450] env[62619]: DEBUG nova.scheduler.client.report [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 855.055586] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 78%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.100075] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0218c3a4-d7df-47d4-9def-003978485426 tempest-ListServersNegativeTestJSON-1262728300 tempest-ListServersNegativeTestJSON-1262728300-project-member] Lock "42e05759-742f-4732-97cb-cab2cfb06996" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 103.122s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.197022] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc7cfd47-767b-4f84-8a1a-b06fe6e73232 req-ec8bc55c-6d9c-4935-897a-71dfbe0272ac service nova] Releasing lock "refresh_cache-cfa74201-783f-4ef4-8860-e2f53e4dfb81" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.258899] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527f730d-a737-ed9a-2699-10c31b344208, 'name': SearchDatastore_Task, 'duration_secs': 0.011155} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.259643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 855.259771] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 855.260664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 855.260664] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 855.260664] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 855.260664] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-c8fd214e-7712-43fe-b3bd-4401e5f30d23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.273776] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 855.273776] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 855.274095] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7bffb2eb-6365-4fdf-8528-97d80931ed8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.282565] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 855.282565] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52108b36-68cd-04da-3c0a-05999bf449c4" [ 855.282565] env[62619]: _type = "Task" [ 855.282565] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.293638] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52108b36-68cd-04da-3c0a-05999bf449c4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.331643] env[62619]: INFO nova.compute.manager [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 30045f41-3396-47cb-833d-b5b434c3671b] Took 1.03 seconds to deallocate network for instance. [ 855.506144] env[62619]: DEBUG oslo_vmware.api [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364927, 'name': PowerOnVM_Task, 'duration_secs': 2.100926} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.506555] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 855.506811] env[62619]: INFO nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Took 12.76 seconds to spawn the instance on the hypervisor. 
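[editor's note] The *_Task entries throughout this stretch (CreateVM_Task, PowerOnVM_Task, RelocateVM_Task, SearchDatastore_Task) all follow the same oslo.vmware invoke/poll pattern: a SOAP method returns a Task moref, and the session polls it until completion (the "progress is N%" lines come from _poll_task). A minimal sketch of that pattern is below; the connection arguments and vm_ref are placeholders, and only VMwareAPISession, invoke_api and wait_for_task are assumed from oslo.vmware.

    # Sketch of the invoke/poll pattern behind the *_Task log entries above.
    # Credentials and the VM reference are illustrative placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc1.example.test', 'user', 'secret',      # placeholder vCenter credentials
        api_retry_count=10, task_poll_interval=0.5)

    vm_ref = ...  # VirtualMachine managed object reference obtained elsewhere

    # Invoking the SOAP method returns a Task moref immediately ...
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # ... and wait_for_task polls it (producing the "progress is N%" lines)
    # until it reaches the 'success' state, raising if the task errors out.
    session.wait_for_task(task)

[end editor's note]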
[ 855.507061] env[62619]: DEBUG nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 855.507949] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303f4856-a3c3-4221-a49d-90427d4dec2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.547899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.548514] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 855.556103] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 12.365s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.560596] env[62619]: INFO nova.compute.claims [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 855.567018] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 92%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.602989] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 855.640929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.641380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.641791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 855.642663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 855.642663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 855.647151] env[62619]: INFO nova.compute.manager [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Terminating instance [ 855.648452] env[62619]: DEBUG nova.compute.manager [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 855.648650] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 855.649499] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a27800b7-d953-4073-af25-edf519c17372 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.658891] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 855.659275] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c3495b4-840d-4d4f-a1e7-30b0630e8aec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.666848] env[62619]: DEBUG oslo_vmware.api [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 855.666848] env[62619]: value = "task-1364933" [ 855.666848] env[62619]: _type = "Task" [ 855.666848] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.676928] env[62619]: DEBUG oslo_vmware.api [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364933, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 855.795167] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52108b36-68cd-04da-3c0a-05999bf449c4, 'name': SearchDatastore_Task, 'duration_secs': 0.021509} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 855.796394] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3368ae2-0f43-42e3-ad6f-26e07347e878 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.805233] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 855.805233] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b9e430-e995-9a6e-b08b-670037925070" [ 855.805233] env[62619]: _type = "Task" [ 855.805233] env[62619]: } to complete. 
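The terminate path that starts above first takes the per-instance lock ("29f16f05-...") and the companion "-events" lock, which is why the acquire/release pairs are held for only fractions of a second before the destroy begins. A rough sketch of that locking pattern with oslo.concurrency; the function bodies and names here are illustrative, not Nova's.

from oslo_concurrency import lockutils

def clear_events_for_instance(instance_uuid):
    # Short critical section guarding queued external events,
    # mirroring the "<uuid>-events" lock in the log.
    with lockutils.lock(f"{instance_uuid}-events"):
        pass  # drop any pending events for the instance here

def do_terminate_instance(instance_uuid):
    # The whole teardown runs under the per-instance lock so that a
    # concurrent build, reboot or delete cannot interleave with it.
    with lockutils.lock(instance_uuid):
        clear_events_for_instance(instance_uuid)
        # ... power off, unregister and delete the VM ...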
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 855.815353] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b9e430-e995-9a6e-b08b-670037925070, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.029994] env[62619]: INFO nova.compute.manager [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Took 31.38 seconds to build instance. [ 856.050548] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.068193] env[62619]: DEBUG nova.compute.utils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 856.074114] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 856.074114] env[62619]: DEBUG nova.network.neutron [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 856.128932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 856.146985] env[62619]: DEBUG nova.policy [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3041343376d4f2fad14577d5c412b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4270942193cd4a9aa397784368b9ae64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 856.177325] env[62619]: DEBUG oslo_vmware.api [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364933, 'name': PowerOffVM_Task, 'duration_secs': 0.210216} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.177683] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 856.177935] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 856.178387] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ad825ae8-afef-4d56-b034-6db158500f99 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.246086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 856.246362] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 856.246589] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Deleting the datastore file [datastore2] 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 856.246931] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-709ff542-ea24-481c-ab42-136ce7375406 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.253705] env[62619]: DEBUG oslo_vmware.api [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for the task: (returnval){ [ 856.253705] env[62619]: value = "task-1364935" [ 856.253705] env[62619]: _type = "Task" [ 856.253705] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.261717] env[62619]: DEBUG oslo_vmware.api [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364935, 'name': DeleteDatastoreFile_Task} progress is 0%. 
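The destroy sequence recorded above runs in a fixed order: power off the VM, unregister it, delete its files from the datastore, then deallocate the network. The sketch below only restates that ordering; every helper in it is a hypothetical stand-in for the corresponding vCenter task seen in the log (PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task) and the Neutron cleanup.

def destroy_instance(vm_ref, datastore_path, instance_uuid):
    """Tear down a VM in the same order the log shows."""
    power_off_vm(vm_ref)                   # PowerOffVM_Task, waited on until done
    unregister_vm(vm_ref)                  # UnregisterVM (synchronous call)
    delete_datastore_dir(datastore_path)   # DeleteDatastoreFile_Task
    deallocate_network(instance_uuid)      # Neutron port cleanup happens last

# Hypothetical stand-ins for the real driver calls.
def power_off_vm(vm_ref): ...
def unregister_vm(vm_ref): ...
def delete_datastore_dir(path): ...
def deallocate_network(instance_uuid): ...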
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.314344] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b9e430-e995-9a6e-b08b-670037925070, 'name': SearchDatastore_Task, 'duration_secs': 0.027679} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.314644] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 856.314908] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] cfa74201-783f-4ef4-8860-e2f53e4dfb81/cfa74201-783f-4ef4-8860-e2f53e4dfb81.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 856.315195] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a524ba27-b692-4868-81ff-dc0688184001 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.324243] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 856.324243] env[62619]: value = "task-1364936" [ 856.324243] env[62619]: _type = "Task" [ 856.324243] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 856.332953] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364936, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.366894] env[62619]: INFO nova.scheduler.client.report [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocations for instance 30045f41-3396-47cb-833d-b5b434c3671b [ 856.537257] env[62619]: DEBUG oslo_concurrency.lockutils [None req-069e519c-c21f-4ca0-bd65-c338d54157d0 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "61d68c36-5251-4fad-9d3b-125296ae0861" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 84.374s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.548623] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.574851] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 856.672602] env[62619]: DEBUG nova.network.neutron [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Successfully created port: 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 856.765728] env[62619]: DEBUG oslo_vmware.api [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Task: {'id': task-1364935, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.294272} completed successfully. 
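The copy in progress above reads the root disk out of the image cache directory ([datastore1] devstack-image-cache_base/<image-id>/<image-id>.vmdk) and writes it into the instance folder before the disk is extended. A condensed sketch of that cache-then-copy decision, assuming hypothetical helpers for the datastore browser and CopyVirtualDisk calls.

def ensure_root_disk(image_id, instance_uuid, datastore="datastore1"):
    """Copy the cached image VMDK into the instance folder, caching it first if missing."""
    cache_vmdk = f"[{datastore}] devstack-image-cache_base/{image_id}/{image_id}.vmdk"
    instance_vmdk = f"[{datastore}] {instance_uuid}/{instance_uuid}.vmdk"

    if not datastore_file_exists(cache_vmdk):      # SearchDatastore_Task in the log
        download_image_to_cache(image_id, cache_vmdk)

    copy_virtual_disk(cache_vmdk, instance_vmdk)   # CopyVirtualDisk_Task
    return instance_vmdk

# Hypothetical stand-ins for the datastore browser / virtual disk manager calls.
def datastore_file_exists(path): ...
def download_image_to_cache(image_id, path): ...
def copy_virtual_disk(src, dst): ...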
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 856.766866] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 856.766866] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 856.767046] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 856.767159] env[62619]: INFO nova.compute.manager [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Took 1.12 seconds to destroy the instance on the hypervisor. [ 856.767360] env[62619]: DEBUG oslo.service.loopingcall [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 856.767821] env[62619]: DEBUG nova.compute.manager [-] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 856.767921] env[62619]: DEBUG nova.network.neutron [-] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 856.837294] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364936, 'name': CopyVirtualDisk_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 856.839143] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79c33f9e-1451-4afb-90a1-f63b8751a6f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.848467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-312ebf20-1ec7-43cc-9b33-e0bdb1a43662 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.886108] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3753763e-8ee5-4c77-a12d-be9c7374a1d6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "30045f41-3396-47cb-833d-b5b434c3671b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 94.904s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 856.888776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ccf66cd-af0b-4c96-99e9-242a170a7a37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.897365] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04af4cae-66ed-4e9b-b8c3-d79d47b47b3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.913691] env[62619]: DEBUG nova.compute.provider_tree [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.042992] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 857.052299] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364928, 'name': RelocateVM_Task, 'duration_secs': 3.517849} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.053151] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 857.053151] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290463', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'name': 'volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c30e0db3-9b63-44b7-9b7f-810defc530d1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'serial': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 857.054123] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f69f2c7-5deb-41a3-b136-0fa7d29cfbd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.071609] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0941ea0-0e71-4fa8-8674-ad8c28f9a7bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.098527] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Reconfiguring VM instance instance-00000043 to attach disk [datastore1] volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc/volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.099895] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f941a36d-0588-40c8-a5cd-f94ea3bb330d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.122867] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 857.122867] env[62619]: value = "task-1364937" [ 857.122867] env[62619]: _type = "Task" [ 857.122867] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.132988] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364937, 'name': ReconfigVM_Task} progress is 6%. 
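The volume attach above carries a 'vmdk' connection_info blob whose data section names the backing 'vm-290463' and the volume UUID, after which the driver reconfigures the VM to add that disk with type thin. A small runnable sketch that extracts the fields the attach needs from exactly that dictionary shape; the path format mirrors the one in the log.

connection_info = {
    "driver_volume_type": "vmdk",
    "data": {
        "volume": "vm-290463",
        "volume_id": "fb506c39-d341-4d52-9b16-5d2ed093a6cc",
        "name": "volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc",
        "access_mode": "rw",
        "encrypted": False,
    },
}

def vmdk_attach_params(info, datastore="datastore1", disk_type="thin"):
    """Derive the ReconfigVM parameters for a vmdk-type volume attach."""
    if info["driver_volume_type"] != "vmdk":
        raise ValueError("only vmdk-backed volumes handled here")
    data = info["data"]
    name = data["name"]
    return {
        "backing_vm": data["volume"],                      # shadow VM that owns the disk
        "vmdk_path": f"[{datastore}] {name}/{name}.vmdk",  # path seen in the log
        "read_only": data["access_mode"] != "rw",
        "disk_type": disk_type,
    }

print(vmdk_attach_params(connection_info))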
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.235764] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "61d68c36-5251-4fad-9d3b-125296ae0861" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.236043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "61d68c36-5251-4fad-9d3b-125296ae0861" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.236321] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.236541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.236728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "61d68c36-5251-4fad-9d3b-125296ae0861-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.242668] env[62619]: INFO nova.compute.manager [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Terminating instance [ 857.248745] env[62619]: DEBUG nova.compute.manager [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 857.248958] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 857.249817] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25e59ba5-a8f9-4947-8799-40861ffa848d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.260101] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 857.261601] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d907eada-84c9-4de2-ad24-bf91a9169e79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.268711] env[62619]: DEBUG oslo_vmware.api [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 857.268711] env[62619]: value = "task-1364938" [ 857.268711] env[62619]: _type = "Task" [ 857.268711] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.278446] env[62619]: DEBUG oslo_vmware.api [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364938, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.336038] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364936, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.686039} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.336038] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] cfa74201-783f-4ef4-8860-e2f53e4dfb81/cfa74201-783f-4ef4-8860-e2f53e4dfb81.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 857.336419] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 857.336419] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a6eb376-171c-4a0a-b707-c51f1ab3e00d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.343145] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 857.343145] env[62619]: value = "task-1364939" [ 857.343145] env[62619]: _type = "Task" [ 857.343145] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.351512] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364939, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.392554] env[62619]: DEBUG nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 857.417139] env[62619]: DEBUG nova.scheduler.client.report [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 857.443759] env[62619]: DEBUG nova.compute.manager [req-d2a716ab-c6cc-4952-bf70-bd9677754922 req-18296da9-d8d3-4f41-8729-ffd422666b2d service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Received event network-vif-deleted-3f368f58-8e8a-4d73-a8fa-31466fbc92ee {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 857.445267] env[62619]: INFO nova.compute.manager [req-d2a716ab-c6cc-4952-bf70-bd9677754922 req-18296da9-d8d3-4f41-8729-ffd422666b2d service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Neutron deleted interface 3f368f58-8e8a-4d73-a8fa-31466fbc92ee; detaching it from the instance and deleting it from the info cache [ 857.445578] env[62619]: DEBUG nova.network.neutron [req-d2a716ab-c6cc-4952-bf70-bd9677754922 req-18296da9-d8d3-4f41-8729-ffd422666b2d service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.570034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.601020] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Start spawning the instance on the hypervisor. 
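For reference, the inventory reported above turns into schedulable capacity as (total - reserved) * allocation_ratio per resource class, so this provider offers 192 VCPU, 196078 MB of memory and 400 GB of disk. A quick check with the same numbers:

# Inventory exactly as reported for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83.
inventory = {
    "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0},
    "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0},
    "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0},
}

def capacity(inv):
    """Schedulable capacity per resource class: (total - reserved) * allocation_ratio."""
    return {rc: (v["total"] - v["reserved"]) * v["allocation_ratio"]
            for rc, v in inv.items()}

print(capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}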
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 857.629918] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 857.630347] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 857.630347] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 857.630615] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 857.630799] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 857.630950] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 857.631234] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 857.631542] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 857.631542] 
env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 857.631641] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 857.631801] env[62619]: DEBUG nova.virt.hardware [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 857.635437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-133277fa-199f-4549-8500-d2bf97a482f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.643132] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364937, 'name': ReconfigVM_Task, 'duration_secs': 0.334186} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.644106] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Reconfigured VM instance instance-00000043 to attach disk [datastore1] volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc/volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 857.650657] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7a4ddb48-8d23-413b-b696-91f9aec5c46f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.664777] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278eae68-8425-48a1-a859-ff959ff2d14d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.682308] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 857.682308] env[62619]: value = "task-1364940" [ 857.682308] env[62619]: _type = "Task" [ 857.682308] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.692680] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364940, 'name': ReconfigVM_Task} progress is 10%. 
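The hardware.py lines above go from a 1-vCPU flavor with no explicit limits (so sockets, cores and threads are each capped at 65536) to the single candidate topology 1:1:1. A simplified, illustrative reimplementation of that enumeration; Nova's real version also weighs image properties and NUMA constraints, which are ignored here.

from collections import namedtuple
from itertools import product

VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
    """Enumerate (sockets, cores, threads) triples whose product equals vcpus."""
    ranges = [range(1, min(vcpus, limit) + 1)
              for limit in (max_sockets, max_cores, max_threads)]
    return [VirtCPUTopology(s, c, t)
            for s, c, t in product(*ranges)
            if s * c * t == vcpus]

print(possible_topologies(1))
# [VirtCPUTopology(sockets=1, cores=1, threads=1)]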
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.756481] env[62619]: DEBUG nova.network.neutron [-] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.780261] env[62619]: DEBUG oslo_vmware.api [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364938, 'name': PowerOffVM_Task, 'duration_secs': 0.209312} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.780523] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 857.780708] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 857.781055] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a752a2cc-c86c-42e5-9980-ee1a901c839e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.852659] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364939, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.073128} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 857.852988] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 857.853962] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32b5ade0-ae34-4336-a75e-12db65ed9f9f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.878396] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Reconfiguring VM instance instance-00000044 to attach disk [datastore1] cfa74201-783f-4ef4-8860-e2f53e4dfb81/cfa74201-783f-4ef4-8860-e2f53e4dfb81.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 857.878875] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f060c4db-c99e-4403-be8b-bc3da0e124cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.901568] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 857.901568] env[62619]: value = "task-1364942" [ 857.901568] env[62619]: _type = "Task" [ 857.901568] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.913392] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364942, 'name': ReconfigVM_Task} progress is 14%. 
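The "Extended root virtual disk" result above matches the earlier ExtendVirtualDisk_Task target of 1048576, which appears to be the flavor's root_gb=1 expressed in KB (1 * 1024 * 1024). The unit conversion, made explicit:

def root_disk_size_kb(root_gb: int) -> int:
    """Flavor root_gb converted to the KB value the extend task targets."""
    return root_gb * 1024 * 1024

assert root_disk_size_kb(1) == 1048576  # matches the size in the log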
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.918087] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 857.918087] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 857.918087] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Deleting the datastore file [datastore2] 61d68c36-5251-4fad-9d3b-125296ae0861 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 857.918437] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 857.918701] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-42c7cddf-3d02-461f-9a67-3d3c13bf697e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.922683] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.367s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 857.923204] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Start building networks asynchronously for instance. 
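"Start building networks asynchronously" above, together with the earlier "Allocating IP information in the background", means port creation runs concurrently with block-device setup, and the build only blocks on the VIFs when the spawn needs them. A minimal illustration of that pattern with a plain thread pool; Nova itself runs this on eventlet greenthreads, and the port id below is just copied from the log.

from concurrent.futures import ThreadPoolExecutor

def allocate_for_instance(instance_uuid):
    """Stand-in for the Neutron call that creates and binds the port."""
    return {"instance": instance_uuid, "port": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf"}

def build_instance(instance_uuid):
    with ThreadPoolExecutor(max_workers=1) as pool:
        # Kick off network allocation in the background...
        network_future = pool.submit(allocate_for_instance, instance_uuid)
        # ...build block device mappings and prepare the spawn meanwhile,
        # then block on the VIFs only when they are actually needed.
        network_info = network_future.result()
    return network_info

print(build_instance("646b4ae6-09e1-4b3c-b17d-392e746df454"))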
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 857.926916] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.815s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 857.928421] env[62619]: INFO nova.compute.claims [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 857.931254] env[62619]: DEBUG oslo_vmware.api [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for the task: (returnval){ [ 857.931254] env[62619]: value = "task-1364943" [ 857.931254] env[62619]: _type = "Task" [ 857.931254] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 857.941339] env[62619]: DEBUG oslo_vmware.api [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364943, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 857.949129] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-897ed1f5-9206-4f29-aaad-163af273f4d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.958564] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec6a93ed-fd11-4478-9790-4bdb46bf70f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.988020] env[62619]: DEBUG nova.compute.manager [req-d2a716ab-c6cc-4952-bf70-bd9677754922 req-18296da9-d8d3-4f41-8729-ffd422666b2d service nova] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Detach interface failed, port_id=3f368f58-8e8a-4d73-a8fa-31466fbc92ee, reason: Instance 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 858.194070] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364940, 'name': ReconfigVM_Task, 'duration_secs': 0.139518} completed successfully. 
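The "Waiting for function ... _deallocate_network_with_retries to return" entries wrap the Neutron teardown in a retrying call so that a transient failure does not leak ports. A generic sketch of such a retry wrapper; the attempt count and interval below are invented for illustration.

import time

def deallocate_network_with_retries(deallocate, instance_uuid,
                                    attempts=3, interval=2.0):
    """Call deallocate(instance_uuid), retrying a few times before giving up."""
    for attempt in range(1, attempts + 1):
        try:
            return deallocate(instance_uuid)
        except Exception as exc:  # a real implementation would catch narrower errors
            if attempt == attempts:
                raise
            print(f"deallocate failed ({exc}); retrying in {interval}s "
                  f"(attempt {attempt} of {attempts})")
            time.sleep(interval)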
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.194070] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290463', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'name': 'volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c30e0db3-9b63-44b7-9b7f-810defc530d1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'serial': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 858.194859] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-144bf1eb-c0ad-4341-8480-5e6f5f1b5d05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.202389] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 858.202389] env[62619]: value = "task-1364944" [ 858.202389] env[62619]: _type = "Task" [ 858.202389] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.213305] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364944, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.258850] env[62619]: INFO nova.compute.manager [-] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Took 1.49 seconds to deallocate network for instance. [ 858.418181] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364942, 'name': ReconfigVM_Task, 'duration_secs': 0.280613} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.418919] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Reconfigured VM instance instance-00000044 to attach disk [datastore1] cfa74201-783f-4ef4-8860-e2f53e4dfb81/cfa74201-783f-4ef4-8860-e2f53e4dfb81.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 858.419851] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6cda5a43-e617-4e9a-b9bd-b08200b9a437 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.430609] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 858.430609] env[62619]: value = "task-1364945" [ 858.430609] env[62619]: _type = "Task" [ 858.430609] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.438031] env[62619]: DEBUG nova.compute.utils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 858.440720] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 858.441089] env[62619]: DEBUG nova.network.neutron [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 858.455508] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364945, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.460847] env[62619]: DEBUG oslo_vmware.api [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Task: {'id': task-1364943, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.199363} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.460847] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 858.460847] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 858.462265] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 858.462797] env[62619]: INFO nova.compute.manager [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Took 1.21 seconds to destroy the instance on the hypervisor. [ 858.462797] env[62619]: DEBUG oslo.service.loopingcall [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.463686] env[62619]: DEBUG nova.compute.manager [-] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 858.463785] env[62619]: DEBUG nova.network.neutron [-] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.563778] env[62619]: DEBUG nova.network.neutron [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Successfully updated port: 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 858.591691] env[62619]: DEBUG nova.policy [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06a6e5bdf61740b2812e05761d99d548', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c226b0d107bd4abe8cd97f79bb2cc02a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 858.712054] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb 
tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364944, 'name': Rename_Task, 'duration_secs': 0.146411} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.712388] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.712700] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-af99c0ed-f727-4070-9244-fcb34af2e419 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.719495] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 858.719495] env[62619]: value = "task-1364946" [ 858.719495] env[62619]: _type = "Task" [ 858.719495] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.727762] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364946, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 858.774937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.917028] env[62619]: DEBUG nova.compute.manager [req-c54caf39-0a47-46cd-8254-969c8fa0b734 req-bcd16fb8-ebde-4ef0-890f-37d5e2ce0d8d service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-vif-plugged-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 858.917279] env[62619]: DEBUG oslo_concurrency.lockutils [req-c54caf39-0a47-46cd-8254-969c8fa0b734 req-bcd16fb8-ebde-4ef0-890f-37d5e2ce0d8d service nova] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 858.917492] env[62619]: DEBUG oslo_concurrency.lockutils [req-c54caf39-0a47-46cd-8254-969c8fa0b734 req-bcd16fb8-ebde-4ef0-890f-37d5e2ce0d8d service nova] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 858.917666] env[62619]: DEBUG oslo_concurrency.lockutils [req-c54caf39-0a47-46cd-8254-969c8fa0b734 req-bcd16fb8-ebde-4ef0-890f-37d5e2ce0d8d service nova] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" "released" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 858.917839] env[62619]: DEBUG nova.compute.manager [req-c54caf39-0a47-46cd-8254-969c8fa0b734 req-bcd16fb8-ebde-4ef0-890f-37d5e2ce0d8d service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] No waiting events found dispatching network-vif-plugged-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 858.918165] env[62619]: WARNING nova.compute.manager [req-c54caf39-0a47-46cd-8254-969c8fa0b734 req-bcd16fb8-ebde-4ef0-890f-37d5e2ce0d8d service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received unexpected event network-vif-plugged-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf for instance with vm_state building and task_state spawning. [ 858.939402] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364945, 'name': Rename_Task, 'duration_secs': 0.161673} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 858.939721] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 858.939990] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b6dfb2fc-82f8-4498-b110-f9a541923adb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.950251] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 858.953269] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 858.953269] env[62619]: value = "task-1364947" [ 858.953269] env[62619]: _type = "Task" [ 858.953269] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 858.966352] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364947, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.069023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 859.069023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 859.069023] env[62619]: DEBUG nova.network.neutron [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 859.104240] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 859.104731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 859.237550] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364946, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.241457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e3a02d2-e8fe-4a0c-a422-c34de9fcca82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.253571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11be42a1-4f21-49a0-afb3-41d108c68f0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.297448] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98c23aff-1ea2-4854-a250-62a5c9657812 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.308916] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc6d0d32-7172-4035-81b5-fa2b7e23c697 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.332445] env[62619]: DEBUG nova.compute.provider_tree [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 859.371340] env[62619]: DEBUG nova.network.neutron [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Successfully created port: 6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 859.468536] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364947, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 859.490222] env[62619]: DEBUG nova.compute.manager [req-794f3fc6-f5c7-43ab-a4c7-10af2334c3ea req-38e7e2bf-a380-4dd9-9d11-fc8925e8e642 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received event network-vif-deleted-b245d7ba-eb3c-4286-840a-250524a98571 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 859.490536] env[62619]: INFO nova.compute.manager [req-794f3fc6-f5c7-43ab-a4c7-10af2334c3ea req-38e7e2bf-a380-4dd9-9d11-fc8925e8e642 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Neutron deleted interface b245d7ba-eb3c-4286-840a-250524a98571; detaching it from the instance and deleting it from the info cache [ 859.490915] env[62619]: DEBUG nova.network.neutron [req-794f3fc6-f5c7-43ab-a4c7-10af2334c3ea req-38e7e2bf-a380-4dd9-9d11-fc8925e8e642 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Updating instance_info_cache with network_info: [{"id": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "address": "fa:16:3e:08:97:70", "network": {"id": "3310b65e-5f7a-403d-b9a7-edc5ed11ec70", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-42072988", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.127", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "3cd104b68e8640f7a50da22df521f2d7", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4734e5e-2a76-4bda-8905-70c9bf9e007f", "external-id": "nsx-vlan-transportzone-122", "segmentation_id": 122, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9a2f13f9-f4", "ovs_interfaceid": "9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.624374] env[62619]: DEBUG nova.network.neutron [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 859.731748] env[62619]: DEBUG oslo_vmware.api [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1364946, 'name': PowerOnVM_Task, 'duration_secs': 0.904652} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.731748] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.731748] env[62619]: INFO nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Took 8.86 seconds to spawn the instance on the hypervisor. [ 859.732132] env[62619]: DEBUG nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.733716] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0596151d-5b92-46f1-bd9b-13854b65e48e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 859.835419] env[62619]: DEBUG nova.scheduler.client.report [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 859.881153] env[62619]: DEBUG nova.network.neutron [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 859.965549] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 859.983450] env[62619]: DEBUG oslo_vmware.api [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364947, 'name': PowerOnVM_Task, 'duration_secs': 0.77706} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 859.983896] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 859.984268] env[62619]: INFO nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Took 8.52 seconds to spawn the instance on the hypervisor. [ 859.984570] env[62619]: DEBUG nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 859.985901] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2f384b0-3216-4619-9e90-742570e15415 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.000816] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a0c04fda-d36e-4033-a688-2c929457bbce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.010900] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 860.011183] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 860.011354] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 860.011541] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 860.011692] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 860.011840] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 860.012097] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 860.012267] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 860.012439] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 860.012605] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 860.012797] env[62619]: DEBUG nova.virt.hardware [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 860.014025] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faf6b9fe-3fd2-421e-b2b5-3b30de0d9344 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.025196] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0971923-c4d4-48f2-8c42-27397f880f84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.030435] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73cf8282-2334-4a87-98f7-5db1643925fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.057218] env[62619]: DEBUG nova.compute.manager [req-794f3fc6-f5c7-43ab-a4c7-10af2334c3ea req-38e7e2bf-a380-4dd9-9d11-fc8925e8e642 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Detach interface failed, port_id=b245d7ba-eb3c-4286-840a-250524a98571, reason: Instance 61d68c36-5251-4fad-9d3b-125296ae0861 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 860.254209] env[62619]: INFO nova.compute.manager [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Took 29.66 seconds to build instance. [ 860.341577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.414s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.342387] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 860.344691] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 10.727s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 860.346122] env[62619]: INFO nova.compute.claims [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 860.360547] env[62619]: DEBUG nova.network.neutron [-] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 860.383435] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 860.387024] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Instance network_info: |[{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 860.387024] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a9:43:0d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 
'7b6beb91-6e5d-49a9-8465-ec68d37a4bbf', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 860.395629] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Creating folder: Project (4270942193cd4a9aa397784368b9ae64). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.397244] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-297c0b3f-817b-4b35-84d4-58613e072ebe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.408536] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Created folder: Project (4270942193cd4a9aa397784368b9ae64) in parent group-v290436. [ 860.408834] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Creating folder: Instances. Parent ref: group-v290480. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 860.409118] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-4aecc8bb-35a1-471c-850a-f8f3ee26f073 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.419043] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Created folder: Instances in parent group-v290480. [ 860.419357] env[62619]: DEBUG oslo.service.loopingcall [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 860.419600] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 860.419849] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d74d37f6-d221-4b01-be66-96fa0008e159 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.439073] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 860.439073] env[62619]: value = "task-1364950" [ 860.439073] env[62619]: _type = "Task" [ 860.439073] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.446945] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364950, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 860.528065] env[62619]: INFO nova.compute.manager [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Took 28.08 seconds to build instance. [ 860.756940] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac0c7a69-0c05-417b-a4fd-fc5b2b39aeeb tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 82.663s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 860.850943] env[62619]: DEBUG nova.compute.utils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 860.855282] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 860.855457] env[62619]: DEBUG nova.network.neutron [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 860.865937] env[62619]: INFO nova.compute.manager [-] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Took 2.40 seconds to deallocate network for instance. [ 860.931320] env[62619]: DEBUG nova.policy [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06a6e5bdf61740b2812e05761d99d548', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c226b0d107bd4abe8cd97f79bb2cc02a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 860.961027] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364950, 'name': CreateVM_Task, 'duration_secs': 0.443273} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 860.961027] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 860.963703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.964095] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.966326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 860.966326] env[62619]: DEBUG nova.compute.manager [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 860.966446] env[62619]: DEBUG nova.compute.manager [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing instance network info cache due to event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 860.966574] env[62619]: DEBUG oslo_concurrency.lockutils [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 860.966720] env[62619]: DEBUG oslo_concurrency.lockutils [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 860.966874] env[62619]: DEBUG nova.network.neutron [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 860.968782] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b33ccf17-c065-45da-b281-76216cb9fcbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 860.975182] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 860.975182] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524c98ba-01b7-a089-e337-c2e5fe3a3df9" [ 860.975182] env[62619]: _type = "Task" [ 860.975182] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 860.984737] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524c98ba-01b7-a089-e337-c2e5fe3a3df9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.030596] env[62619]: DEBUG oslo_concurrency.lockutils [None req-eb633f7a-e154-4114-8a51-07539d29f002 tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 62.088s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.261346] env[62619]: DEBUG nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.358516] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 861.376452] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.443612] env[62619]: DEBUG nova.network.neutron [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Successfully created port: 5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 861.453196] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.453459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.453715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.453841] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.454021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.457372] env[62619]: INFO nova.compute.manager [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Terminating instance [ 861.459693] env[62619]: DEBUG nova.compute.manager [None 
req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 861.459693] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 861.462059] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-276af21b-9536-4753-bea5-4a542d6b8ae7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.478495] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 861.482395] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e15b075b-6357-4344-a8ee-2216fea30893 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.494042] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524c98ba-01b7-a089-e337-c2e5fe3a3df9, 'name': SearchDatastore_Task, 'duration_secs': 0.017461} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 861.494606] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 861.494949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 861.495251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 861.495560] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 861.495638] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 861.496443] env[62619]: DEBUG oslo_vmware.api [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 861.496443] env[62619]: value = "task-1364951" [ 861.496443] env[62619]: _type = "Task" [ 861.496443] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.496443] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-82f235dc-2eed-4f8b-ac7d-8adaabadc74c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.511778] env[62619]: DEBUG oslo_vmware.api [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364951, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.513698] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 861.513840] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 861.514595] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6814997b-3310-468b-8112-82cb2e655f83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.526022] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 861.526022] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528fb0d6-1277-ece5-bcd5-150647840293" [ 861.526022] env[62619]: _type = "Task" [ 861.526022] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.532698] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528fb0d6-1277-ece5-bcd5-150647840293, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 861.532814] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 861.591806] env[62619]: DEBUG nova.compute.manager [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Received event network-vif-deleted-9a2f13f9-f4d6-4377-82e8-2e4a4b05bd50 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 861.592034] env[62619]: DEBUG nova.compute.manager [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Received event network-vif-plugged-6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 861.592236] env[62619]: DEBUG oslo_concurrency.lockutils [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] Acquiring lock "ca452ef6-d777-46dd-a313-ae7dd441adca-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.593167] env[62619]: DEBUG oslo_concurrency.lockutils [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 861.593167] env[62619]: DEBUG oslo_concurrency.lockutils [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 861.593167] env[62619]: DEBUG nova.compute.manager [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] No waiting events found dispatching network-vif-plugged-6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 861.593167] env[62619]: WARNING nova.compute.manager [req-f75b8a37-c26d-484c-b2a1-b7d5948b331b req-676c5712-5c34-4fa7-9b10-9a5dc5951555 service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Received unexpected event network-vif-plugged-6ecf8988-6ce6-4b80-b927-57c2ef3a8100 for instance with vm_state building and task_state spawning. 
[ 861.624027] env[62619]: DEBUG nova.network.neutron [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Successfully updated port: 6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 861.660413] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b00477c-515b-4432-8d06-bf6e05139b21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.666445] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0824c18-2065-4aa7-90f7-39964cd05619 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.702534] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c98507c1-c751-4466-94cc-4be52d2da27b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.711109] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-609c5a16-a3a3-4ec5-8aa9-a50da2fad851 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.727134] env[62619]: DEBUG nova.compute.provider_tree [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 861.783275] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 861.872964] env[62619]: DEBUG nova.network.neutron [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updated VIF entry in instance network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 861.873071] env[62619]: DEBUG nova.network.neutron [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 862.008435] env[62619]: DEBUG oslo_vmware.api [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364951, 'name': PowerOffVM_Task, 'duration_secs': 0.184539} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.008730] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 862.008905] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 862.009175] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09ae4800-9da9-4c46-b619-d4d57832a6f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.033458] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528fb0d6-1277-ece5-bcd5-150647840293, 'name': SearchDatastore_Task, 'duration_secs': 0.010577} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.034304] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ad7310a-3277-45b4-ad7f-1f850304b39d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.041871] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 862.041871] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5251dc69-2746-fd2e-4d3d-345568a94119" [ 862.041871] env[62619]: _type = "Task" [ 862.041871] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.052984] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5251dc69-2746-fd2e-4d3d-345568a94119, 'name': SearchDatastore_Task, 'duration_secs': 0.009789} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.053325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.053618] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 646b4ae6-09e1-4b3c-b17d-392e746df454/646b4ae6-09e1-4b3c-b17d-392e746df454.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 862.053895] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-297a1e74-e16a-45b6-a0ea-44eb6b827499 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.056733] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.061414] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 862.061414] env[62619]: value = "task-1364953" [ 862.061414] env[62619]: _type = "Task" [ 862.061414] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.069662] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364953, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.109329] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 862.109329] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 862.109329] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Deleting the datastore file [datastore1] cfa74201-783f-4ef4-8860-e2f53e4dfb81 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 862.109664] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8e4aae68-059b-4939-acdd-f7cf385e27b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.117605] env[62619]: DEBUG oslo_vmware.api [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for the task: (returnval){ [ 862.117605] env[62619]: value = "task-1364954" [ 862.117605] env[62619]: _type = "Task" [ 862.117605] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.125759] env[62619]: DEBUG oslo_vmware.api [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364954, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.130888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 862.130888] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 862.130888] env[62619]: DEBUG nova.network.neutron [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 862.231727] env[62619]: DEBUG nova.scheduler.client.report [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 862.371028] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 862.376173] env[62619]: DEBUG oslo_concurrency.lockutils [req-62754422-4aed-4a63-8eae-56c18f0fbb06 req-12e3b263-119c-4677-b17d-de51a1bcb2b5 service nova] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 862.412396] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 862.413582] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 862.413582] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 862.413582] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 862.413582] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 862.413582] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 862.413582] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 862.413865] 
env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 862.413898] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 862.414424] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 862.414424] env[62619]: DEBUG nova.virt.hardware [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 862.415819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a4f2ec9-d202-42c9-bebb-85e53528d580 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.425721] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6345134b-c4cf-43dd-b0d5-a4e8fe58bfac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.572076] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364953, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.628868] env[62619]: DEBUG oslo_vmware.api [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Task: {'id': task-1364954, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.422315} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.628868] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 862.628868] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 862.628868] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 862.628868] env[62619]: INFO nova.compute.manager [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Took 1.17 seconds to destroy the instance on the hypervisor. [ 862.629120] env[62619]: DEBUG oslo.service.loopingcall [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 862.629200] env[62619]: DEBUG nova.compute.manager [-] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 862.629292] env[62619]: DEBUG nova.network.neutron [-] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 862.714965] env[62619]: DEBUG nova.network.neutron [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 862.738014] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.393s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 862.738859] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 862.742479] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.804s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 862.744075] env[62619]: INFO nova.compute.claims [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 862.816591] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.820101] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 862.987132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 862.987132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 863.000887] env[62619]: DEBUG nova.compute.manager [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Received event network-changed-5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.002312] env[62619]: DEBUG nova.compute.manager [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Refreshing instance network info cache due to event network-changed-5f9d96e1-ec69-4313-99e3-a91700248c54. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 863.002859] env[62619]: DEBUG oslo_concurrency.lockutils [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] Acquiring lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.003837] env[62619]: DEBUG oslo_concurrency.lockutils [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] Acquired lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.004148] env[62619]: DEBUG nova.network.neutron [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Refreshing network info cache for port 5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 863.051503] env[62619]: DEBUG nova.network.neutron [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Updating instance_info_cache with network_info: [{"id": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "address": "fa:16:3e:cc:c5:14", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecf8988-6c", "ovs_interfaceid": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.073609] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364953, 'name': CopyVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.245797] env[62619]: DEBUG nova.compute.utils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 863.245797] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 863.253022] env[62619]: DEBUG nova.network.neutron [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 863.297802] env[62619]: DEBUG nova.policy [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '06a6e5bdf61740b2812e05761d99d548', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c226b0d107bd4abe8cd97f79bb2cc02a', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 863.326992] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 863.328019] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 863.328019] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 863.379730] env[62619]: DEBUG nova.network.neutron [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Successfully updated port: 5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 863.556899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 863.557260] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb 
tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Instance network_info: |[{"id": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "address": "fa:16:3e:cc:c5:14", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecf8988-6c", "ovs_interfaceid": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 863.557695] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:c5:14', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e27fd35-1d7b-4358-92d5-4d34da27b992', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6ecf8988-6ce6-4b80-b927-57c2ef3a8100', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 863.565252] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Creating folder: Project (c226b0d107bd4abe8cd97f79bb2cc02a). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.565607] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-77bb4276-0eeb-4693-b487-8f65710fd1eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.576168] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364953, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.579993] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Created folder: Project (c226b0d107bd4abe8cd97f79bb2cc02a) in parent group-v290436. 
[ 863.580230] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Creating folder: Instances. Parent ref: group-v290483. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 863.580498] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fb058c44-d879-4d06-9853-70bf038eb7f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.589519] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Created folder: Instances in parent group-v290483. [ 863.589765] env[62619]: DEBUG oslo.service.loopingcall [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 863.590359] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 863.594033] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-139bfddf-8e95-49ee-bab0-381705192f6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 863.605460] env[62619]: DEBUG nova.network.neutron [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Successfully created port: 6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 863.611478] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 863.611478] env[62619]: value = "task-1364957" [ 863.611478] env[62619]: _type = "Task" [ 863.611478] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 863.614467] env[62619]: DEBUG nova.network.neutron [-] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.621416] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364957, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 863.684527] env[62619]: DEBUG nova.compute.manager [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Received event network-changed-6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 863.684741] env[62619]: DEBUG nova.compute.manager [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Refreshing instance network info cache due to event network-changed-6ecf8988-6ce6-4b80-b927-57c2ef3a8100. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 863.684950] env[62619]: DEBUG oslo_concurrency.lockutils [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] Acquiring lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.685102] env[62619]: DEBUG oslo_concurrency.lockutils [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] Acquired lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.685298] env[62619]: DEBUG nova.network.neutron [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Refreshing network info cache for port 6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 863.755369] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 863.765843] env[62619]: DEBUG nova.network.neutron [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updated VIF entry in instance network info cache for port 5f9d96e1-ec69-4313-99e3-a91700248c54. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 863.767729] env[62619]: DEBUG nova.network.neutron [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updating instance_info_cache with network_info: [{"id": "5f9d96e1-ec69-4313-99e3-a91700248c54", "address": "fa:16:3e:09:e6:e7", "network": {"id": "10f913fe-f846-48d1-b9eb-8479a1b45319", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-767095649-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.136", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "944cd46054cf4352a1dff3284bd5a88c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "3f9ffee1-f413-4f28-8bc4-3fb2cf299789", "external-id": "nsx-vlan-transportzone-599", "segmentation_id": 599, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f9d96e1-ec", "ovs_interfaceid": "5f9d96e1-ec69-4313-99e3-a91700248c54", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 863.840861] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Skipping network cache update for instance because it is being deleted. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 863.840861] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.840861] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.840861] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.840861] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.840861] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Skipping network cache update for instance because it is Building. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 863.840861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.840861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquired lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.840861] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 863.840861] env[62619]: DEBUG nova.objects.instance [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lazy-loading 'info_cache' on Instance uuid 9fd66533-39ff-401d-81ef-f37eaceb3103 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 863.886675] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "refresh_cache-59b960b6-aa41-4409-a899-9829388c3ff2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 863.886829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "refresh_cache-59b960b6-aa41-4409-a899-9829388c3ff2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 863.887588] env[62619]: DEBUG nova.network.neutron [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 864.043624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1b88f63-7b31-46f6-8756-2aeada78dfcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.051110] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e13c73d6-19a0-48b3-8a25-4072d4355ab6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.091402] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7964d28e-db42-4312-800a-cd33ae0aab3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.100127] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364953, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.522038} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.103636] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 646b4ae6-09e1-4b3c-b17d-392e746df454/646b4ae6-09e1-4b3c-b17d-392e746df454.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 864.103636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 864.103636] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-27fe460b-3910-4bd5-bb7e-143a9e520d9d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.105899] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c978b7c8-2c97-407b-9845-9ffb5d28bdf6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.128762] env[62619]: INFO nova.compute.manager [-] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Took 1.50 seconds to deallocate network for instance. [ 864.129704] env[62619]: DEBUG nova.compute.provider_tree [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 864.131012] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 864.131012] env[62619]: value = "task-1364958" [ 864.131012] env[62619]: _type = "Task" [ 864.131012] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.143767] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364957, 'name': CreateVM_Task, 'duration_secs': 0.355308} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.144963] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 864.145691] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.145960] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.146334] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 864.149940] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8d7ccdb-dd7c-4571-9d62-bad935e893bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.151796] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364958, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.155282] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 864.155282] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cc9a9a-24ae-f55c-c7a5-2a5dc14063a1" [ 864.155282] env[62619]: _type = "Task" [ 864.155282] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.166213] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cc9a9a-24ae-f55c-c7a5-2a5dc14063a1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.269669] env[62619]: DEBUG oslo_concurrency.lockutils [req-5a2428f6-1103-4ecc-9344-277cd9fa9136 req-cda3aeb9-69b3-42ec-aa95-23ed50523645 service nova] Releasing lock "refresh_cache-c30e0db3-9b63-44b7-9b7f-810defc530d1" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.435209] env[62619]: DEBUG nova.network.neutron [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 864.559950] env[62619]: DEBUG nova.network.neutron [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Updated VIF entry in instance network info cache for port 6ecf8988-6ce6-4b80-b927-57c2ef3a8100. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 864.559950] env[62619]: DEBUG nova.network.neutron [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Updating instance_info_cache with network_info: [{"id": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "address": "fa:16:3e:cc:c5:14", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecf8988-6c", "ovs_interfaceid": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.587613] env[62619]: DEBUG nova.network.neutron [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Updating instance_info_cache with network_info: [{"id": "5f53828c-c7ba-4916-a4e8-82eef12e1166", "address": "fa:16:3e:36:04:bf", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": 
"192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f53828c-c7", "ovs_interfaceid": "5f53828c-c7ba-4916-a4e8-82eef12e1166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 864.632948] env[62619]: DEBUG nova.scheduler.client.report [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 864.645327] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 864.645680] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364958, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.089498} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.645931] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 864.646771] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0bc1dbf-f0ff-4c1c-b17f-8232d7fe1126 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.670144] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Reconfiguring VM instance instance-00000045 to attach disk [datastore1] 646b4ae6-09e1-4b3c-b17d-392e746df454/646b4ae6-09e1-4b3c-b17d-392e746df454.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 864.674100] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7caaf751-75bb-44e4-bc5e-3d5218bd5035 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.693863] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cc9a9a-24ae-f55c-c7a5-2a5dc14063a1, 'name': SearchDatastore_Task, 'duration_secs': 0.011203} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 864.695187] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 864.695434] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 864.695677] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 864.695831] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 864.696013] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 864.696347] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 864.696347] env[62619]: value = "task-1364959" [ 864.696347] env[62619]: _type = "Task" [ 864.696347] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.696535] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5df0af2f-175d-4273-b96d-c3141134b653 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.706494] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364959, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.707644] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 864.707844] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 864.708551] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa753dec-9c5c-4df3-8165-7ffd5022c41d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.713811] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 864.713811] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5229d0d7-8f87-e6fc-b097-0a768dba65af" [ 864.713811] env[62619]: _type = "Task" [ 864.713811] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 864.723308] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5229d0d7-8f87-e6fc-b097-0a768dba65af, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 864.767647] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 864.796529] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 864.796529] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 864.796730] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 864.796872] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 864.797092] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 864.797296] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 864.797556] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 864.797753] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 864.797970] 
env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 864.798221] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 864.798525] env[62619]: DEBUG nova.virt.hardware [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 864.799542] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-511af198-820a-4587-9b13-86af06d98a08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 864.808359] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a1c3b7c-5b61-43e9-8d7a-cf0b720e55d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.029447] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233cc65-504e-7eb3-b497-eb48b8588754/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 865.030545] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50776038-71a9-4044-8199-bc27994ac495 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.035113] env[62619]: DEBUG nova.compute.manager [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Received event network-vif-plugged-5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.035328] env[62619]: DEBUG oslo_concurrency.lockutils [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] Acquiring lock "59b960b6-aa41-4409-a899-9829388c3ff2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.035558] env[62619]: DEBUG oslo_concurrency.lockutils [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] Lock "59b960b6-aa41-4409-a899-9829388c3ff2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.035691] env[62619]: DEBUG oslo_concurrency.lockutils [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] Lock "59b960b6-aa41-4409-a899-9829388c3ff2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.035858] env[62619]: DEBUG nova.compute.manager [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] No waiting events found dispatching network-vif-plugged-5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 865.036029] env[62619]: WARNING nova.compute.manager [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Received unexpected event network-vif-plugged-5f53828c-c7ba-4916-a4e8-82eef12e1166 for instance with vm_state building and task_state spawning. [ 865.036194] env[62619]: DEBUG nova.compute.manager [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Received event network-changed-5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.036345] env[62619]: DEBUG nova.compute.manager [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Refreshing instance network info cache due to event network-changed-5f53828c-c7ba-4916-a4e8-82eef12e1166. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 865.036507] env[62619]: DEBUG oslo_concurrency.lockutils [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] Acquiring lock "refresh_cache-59b960b6-aa41-4409-a899-9829388c3ff2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.040641] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233cc65-504e-7eb3-b497-eb48b8588754/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 865.040894] env[62619]: ERROR oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233cc65-504e-7eb3-b497-eb48b8588754/disk-0.vmdk due to incomplete transfer. [ 865.041160] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-86593635-3e7e-4f8c-b847-a547aed69145 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.049544] env[62619]: DEBUG oslo_vmware.rw_handles [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5233cc65-504e-7eb3-b497-eb48b8588754/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 865.049753] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Uploaded image ac757367-2c71-4040-8956-e58c49d088b3 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 865.052150] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 865.052408] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-8ad80b8b-d37c-46ca-9c3c-c5f8484390a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.058717] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 865.058717] env[62619]: value = "task-1364960" [ 865.058717] env[62619]: _type = "Task" [ 865.058717] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.062072] env[62619]: DEBUG oslo_concurrency.lockutils [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] Releasing lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.062316] env[62619]: DEBUG nova.compute.manager [req-67de84df-7e61-42c9-9f6f-c1ecd29ab4fc req-1a710304-d1f3-440c-9eb3-8d15dfe98d9a service nova] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Received event network-vif-deleted-9e4efb80-3307-4c94-9bac-f76ba96bc57d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.067106] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364960, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.090895] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "refresh_cache-59b960b6-aa41-4409-a899-9829388c3ff2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.090895] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Instance network_info: |[{"id": "5f53828c-c7ba-4916-a4e8-82eef12e1166", "address": "fa:16:3e:36:04:bf", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f53828c-c7", "ovs_interfaceid": "5f53828c-c7ba-4916-a4e8-82eef12e1166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 865.090895] env[62619]: DEBUG oslo_concurrency.lockutils [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] Acquired lock "refresh_cache-59b960b6-aa41-4409-a899-9829388c3ff2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.091125] env[62619]: DEBUG nova.network.neutron [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] 
[instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Refreshing network info cache for port 5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 865.092577] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:36:04:bf', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e27fd35-1d7b-4358-92d5-4d34da27b992', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f53828c-c7ba-4916-a4e8-82eef12e1166', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 865.100409] env[62619]: DEBUG oslo.service.loopingcall [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 865.100894] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 865.101134] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-87b9ed07-02f1-4c6a-a695-8f5b78332e81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.121358] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 865.121358] env[62619]: value = "task-1364961" [ 865.121358] env[62619]: _type = "Task" [ 865.121358] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.129841] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364961, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.140179] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.398s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.140768] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 865.143517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.015s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.144862] env[62619]: INFO nova.compute.claims [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 865.209479] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364959, 'name': ReconfigVM_Task, 'duration_secs': 0.266489} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.209750] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Reconfigured VM instance instance-00000045 to attach disk [datastore1] 646b4ae6-09e1-4b3c-b17d-392e746df454/646b4ae6-09e1-4b3c-b17d-392e746df454.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 865.210444] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-3094c4ef-395d-4a50-8bc5-a20d88bd4206 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.218700] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 865.218700] env[62619]: value = "task-1364962" [ 865.218700] env[62619]: _type = "Task" [ 865.218700] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.224999] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5229d0d7-8f87-e6fc-b097-0a768dba65af, 'name': SearchDatastore_Task, 'duration_secs': 0.010616} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.226057] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b042a6ba-9ed4-42de-9096-62bb566da97c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.230941] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364962, 'name': Rename_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.234113] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 865.234113] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5203809b-168f-f272-12b5-b8ff57e3713a" [ 865.234113] env[62619]: _type = "Task" [ 865.234113] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.241259] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5203809b-168f-f272-12b5-b8ff57e3713a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.568724] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364960, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.630336] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364961, 'name': CreateVM_Task, 'duration_secs': 0.449656} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.632281] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 865.632911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 865.633092] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 865.633646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 865.633909] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9f79187a-39f0-4973-b232-709a81fd2f96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.637911] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a 
tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 865.637911] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dc6f5a-c0a0-efd2-6deb-9d5d72a04b81" [ 865.637911] env[62619]: _type = "Task" [ 865.637911] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.645215] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dc6f5a-c0a0-efd2-6deb-9d5d72a04b81, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.648511] env[62619]: DEBUG nova.compute.utils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 865.651404] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 865.651534] env[62619]: DEBUG nova.network.neutron [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 865.714654] env[62619]: DEBUG nova.compute.manager [req-270ebe23-c35c-4b01-ab77-88d125b29525 req-b0943fad-28e5-493d-aa0f-7c79dbf4951f service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Received event network-vif-plugged-6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 865.714886] env[62619]: DEBUG oslo_concurrency.lockutils [req-270ebe23-c35c-4b01-ab77-88d125b29525 req-b0943fad-28e5-493d-aa0f-7c79dbf4951f service nova] Acquiring lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 865.715161] env[62619]: DEBUG oslo_concurrency.lockutils [req-270ebe23-c35c-4b01-ab77-88d125b29525 req-b0943fad-28e5-493d-aa0f-7c79dbf4951f service nova] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 865.715286] env[62619]: DEBUG oslo_concurrency.lockutils [req-270ebe23-c35c-4b01-ab77-88d125b29525 req-b0943fad-28e5-493d-aa0f-7c79dbf4951f service nova] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 865.715463] env[62619]: DEBUG 
nova.compute.manager [req-270ebe23-c35c-4b01-ab77-88d125b29525 req-b0943fad-28e5-493d-aa0f-7c79dbf4951f service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] No waiting events found dispatching network-vif-plugged-6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 865.715643] env[62619]: WARNING nova.compute.manager [req-270ebe23-c35c-4b01-ab77-88d125b29525 req-b0943fad-28e5-493d-aa0f-7c79dbf4951f service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Received unexpected event network-vif-plugged-6b6b714f-2980-4b3f-b83a-26862818d2fb for instance with vm_state building and task_state spawning. [ 865.730722] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364962, 'name': Rename_Task, 'duration_secs': 0.135282} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.731013] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 865.733631] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ea2ea5b2-c586-473e-8f40-84fef69dfe5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.740229] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updating instance_info_cache with network_info: [{"id": "4503b37c-ef93-4256-bf3f-6045d5857427", "address": "fa:16:3e:b2:21:33", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4503b37c-ef", "ovs_interfaceid": "4503b37c-ef93-4256-bf3f-6045d5857427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.748431] env[62619]: DEBUG nova.policy [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '926f346bd63541c2bdde7749920f94f8', 'user_domain_id': 
'default', 'system_scope': None, 'domain_id': None, 'project_id': 'd0a67999ce56460cb5744f79256d1a2e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 865.750186] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 865.750186] env[62619]: value = "task-1364963" [ 865.750186] env[62619]: _type = "Task" [ 865.750186] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.755481] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5203809b-168f-f272-12b5-b8ff57e3713a, 'name': SearchDatastore_Task, 'duration_secs': 0.012395} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 865.758768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 865.758988] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] ca452ef6-d777-46dd-a313-ae7dd441adca/ca452ef6-d777-46dd-a313-ae7dd441adca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 865.759526] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1066208a-6965-474a-abbc-a1e42b881598 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 865.767736] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364963, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.769095] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 865.769095] env[62619]: value = "task-1364964" [ 865.769095] env[62619]: _type = "Task" [ 865.769095] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 865.781238] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 865.814274] env[62619]: DEBUG nova.network.neutron [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Updated VIF entry in instance network info cache for port 5f53828c-c7ba-4916-a4e8-82eef12e1166. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 865.815804] env[62619]: DEBUG nova.network.neutron [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Updating instance_info_cache with network_info: [{"id": "5f53828c-c7ba-4916-a4e8-82eef12e1166", "address": "fa:16:3e:36:04:bf", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f53828c-c7", "ovs_interfaceid": "5f53828c-c7ba-4916-a4e8-82eef12e1166", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 865.863107] env[62619]: DEBUG nova.network.neutron [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Successfully updated port: 6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 866.070241] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364960, 'name': Destroy_Task, 'duration_secs': 0.763039} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.070652] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Destroyed the VM [ 866.070997] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 866.071301] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-eba4d065-f09a-44e5-a547-eccb34b9d860 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.079447] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 866.079447] env[62619]: value = "task-1364965" [ 866.079447] env[62619]: _type = "Task" [ 866.079447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.089085] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364965, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.154667] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dc6f5a-c0a0-efd2-6deb-9d5d72a04b81, 'name': SearchDatastore_Task, 'duration_secs': 0.00859} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.155225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.155570] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 866.156131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.156373] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.156641] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 866.157469] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 866.165351] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b64ba3d5-2061-4497-93cb-8aa7f8c64fcc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.181116] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 866.181342] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 866.182174] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9497c21e-c11b-4fa6-a7c1-15ce49f5a25c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.190817] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 866.190817] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d43b5f-85d5-c656-d206-26dc761ac442" [ 866.190817] env[62619]: _type = "Task" [ 866.190817] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.207689] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d43b5f-85d5-c656-d206-26dc761ac442, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.208458] env[62619]: DEBUG nova.network.neutron [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Successfully created port: 364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 866.247648] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Releasing lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.247984] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 866.248523] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.248709] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.248859] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.249036] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.249174] env[62619]: DEBUG 
oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.249334] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.249491] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 866.249655] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 866.270021] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364963, 'name': PowerOnVM_Task} progress is 88%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.283459] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364964, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.321899] env[62619]: DEBUG oslo_concurrency.lockutils [req-da4e9afd-fa9e-4085-9e92-b436e8d1f123 req-73395118-d7ce-4144-86e5-3d7b312e4dab service nova] Releasing lock "refresh_cache-59b960b6-aa41-4409-a899-9829388c3ff2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 866.366922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "refresh_cache-055a1071-bd4b-4d1b-88c0-7551a07aee9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 866.366922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "refresh_cache-055a1071-bd4b-4d1b-88c0-7551a07aee9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 866.366922] env[62619]: DEBUG nova.network.neutron [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 866.415615] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa80c2e9-9227-4078-8876-d0ab8879c08e {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.423826] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1dc4291-8301-47cc-9c4b-f3280123401b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.454020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4a91f30-64c3-4a81-849e-2fc56b9e875e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.461858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e0f533d-f0d2-4f3c-9b46-0f0d3d60ca4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.476945] env[62619]: DEBUG nova.compute.provider_tree [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 866.589691] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364965, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.702234] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d43b5f-85d5-c656-d206-26dc761ac442, 'name': SearchDatastore_Task, 'duration_secs': 0.057495} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.703056] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94576d3f-3f25-4b1d-8120-a266dc560e4b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.707933] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 866.707933] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52e7425b-5b9f-9a4e-13b0-62c630d7f366" [ 866.707933] env[62619]: _type = "Task" [ 866.707933] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.715971] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52e7425b-5b9f-9a4e-13b0-62c630d7f366, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.752924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 866.763770] env[62619]: DEBUG oslo_vmware.api [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1364963, 'name': PowerOnVM_Task, 'duration_secs': 0.740234} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.763869] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 866.764015] env[62619]: INFO nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Took 9.16 seconds to spawn the instance on the hypervisor. [ 866.764210] env[62619]: DEBUG nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 866.764953] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74ae8293-46e3-4b4d-80b8-777fc527c7be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.781103] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364964, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.585243} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 866.781351] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] ca452ef6-d777-46dd-a313-ae7dd441adca/ca452ef6-d777-46dd-a313-ae7dd441adca.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 866.781561] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 866.781823] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d0a3f26a-e6da-44c5-90a9-dd46a84d4b21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 866.788148] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 866.788148] env[62619]: value = "task-1364966" [ 866.788148] env[62619]: _type = "Task" [ 866.788148] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 866.796603] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364966, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 866.898415] env[62619]: DEBUG nova.network.neutron [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 866.980095] env[62619]: DEBUG nova.scheduler.client.report [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 867.048105] env[62619]: DEBUG nova.network.neutron [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Updating instance_info_cache with network_info: [{"id": "6b6b714f-2980-4b3f-b83a-26862818d2fb", "address": "fa:16:3e:f1:b4:5e", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6b714f-29", "ovs_interfaceid": "6b6b714f-2980-4b3f-b83a-26862818d2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 867.090622] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364965, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.169041] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 867.194656] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 867.194913] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 867.195092] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 867.195280] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 867.195437] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 867.195678] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 867.195907] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 867.196100] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 867.196278] env[62619]: DEBUG 
nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 867.196441] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 867.196676] env[62619]: DEBUG nova.virt.hardware [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 867.197548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf929ede-db8a-4713-98f5-b28ba09621e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.205537] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5963164f-7fd7-402c-bed4-d3350b9183f1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.227658] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52e7425b-5b9f-9a4e-13b0-62c630d7f366, 'name': SearchDatastore_Task, 'duration_secs': 0.013803} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.227944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.228227] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 59b960b6-aa41-4409-a899-9829388c3ff2/59b960b6-aa41-4409-a899-9829388c3ff2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 867.228528] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2b0fb5b9-e35c-47af-b76a-3dc9759933dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.235483] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 867.235483] env[62619]: value = "task-1364967" [ 867.235483] env[62619]: _type = "Task" [ 867.235483] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.243277] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364967, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.283130] env[62619]: INFO nova.compute.manager [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Took 28.34 seconds to build instance. [ 867.297740] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364966, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.333398} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.298603] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 867.299485] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-146fec95-01f7-441d-b8e8-ff114e7c38f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.321859] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Reconfiguring VM instance instance-00000046 to attach disk [datastore2] ca452ef6-d777-46dd-a313-ae7dd441adca/ca452ef6-d777-46dd-a313-ae7dd441adca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 867.322440] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2d0112a9-65ca-4ed9-ad8d-55ef3c401cc3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.342853] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 867.342853] env[62619]: value = "task-1364968" [ 867.342853] env[62619]: _type = "Task" [ 867.342853] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.350673] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364968, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.485395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.342s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.485997] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 867.488686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.919s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 867.490078] env[62619]: INFO nova.compute.claims [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 867.550606] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "refresh_cache-055a1071-bd4b-4d1b-88c0-7551a07aee9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 867.551019] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Instance network_info: |[{"id": "6b6b714f-2980-4b3f-b83a-26862818d2fb", "address": "fa:16:3e:f1:b4:5e", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6b714f-29", "ovs_interfaceid": "6b6b714f-2980-4b3f-b83a-26862818d2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 867.551457] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f1:b4:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e27fd35-1d7b-4358-92d5-4d34da27b992', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b6b714f-2980-4b3f-b83a-26862818d2fb', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 867.559331] env[62619]: DEBUG oslo.service.loopingcall [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 
tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 867.559869] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 867.560169] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3496e14e-1f48-42f4-867e-cca1fa80d2db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.582900] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 867.582900] env[62619]: value = "task-1364969" [ 867.582900] env[62619]: _type = "Task" [ 867.582900] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 867.593910] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364965, 'name': RemoveSnapshot_Task, 'duration_secs': 1.398594} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 867.597031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 867.597258] env[62619]: DEBUG nova.compute.manager [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 867.597486] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364969, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.598223] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a32226a-e89b-4c58-b34b-d74c377b9085 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 867.745506] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364967, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.785563] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3fb74c0a-60fe-4af7-913d-709a152cfdba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.151s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 867.825326] env[62619]: DEBUG nova.compute.manager [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Received event network-changed-6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 867.825586] env[62619]: DEBUG nova.compute.manager [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Refreshing instance network info cache due to event network-changed-6b6b714f-2980-4b3f-b83a-26862818d2fb. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 867.825814] env[62619]: DEBUG oslo_concurrency.lockutils [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] Acquiring lock "refresh_cache-055a1071-bd4b-4d1b-88c0-7551a07aee9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 867.825958] env[62619]: DEBUG oslo_concurrency.lockutils [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] Acquired lock "refresh_cache-055a1071-bd4b-4d1b-88c0-7551a07aee9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 867.826151] env[62619]: DEBUG nova.network.neutron [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Refreshing network info cache for port 6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 867.857077] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364968, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 867.995374] env[62619]: DEBUG nova.compute.utils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 868.000758] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 868.000948] env[62619]: DEBUG nova.network.neutron [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 868.097633] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364969, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.100536] env[62619]: DEBUG nova.policy [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1e90a23c6444273bc10051f3227804c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '998daea123aa48b2816d1cbe9e662950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 868.110221] env[62619]: DEBUG nova.network.neutron [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Successfully updated port: 364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 868.112976] env[62619]: INFO nova.compute.manager [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Shelve offloading [ 868.115112] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 868.115367] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-934de0cd-e4d3-48e1-86ce-efcddd6d96f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.122724] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 868.122724] env[62619]: value = "task-1364970" [ 868.122724] env[62619]: _type = "Task" [ 868.122724] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.134272] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 868.134532] env[62619]: DEBUG nova.compute.manager [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 868.135323] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74df74d-1d0e-4ec5-8a60-02382a3502d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.141462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.141583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.141748] env[62619]: DEBUG nova.network.neutron [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 868.246994] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364967, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.890214} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.247297] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 59b960b6-aa41-4409-a899-9829388c3ff2/59b960b6-aa41-4409-a899-9829388c3ff2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 868.247522] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 868.250056] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2a28dc27-14ba-4bf1-98b9-232eb55d2468 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.254943] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 868.254943] env[62619]: value = "task-1364971" [ 868.254943] env[62619]: _type = "Task" [ 868.254943] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.263492] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364971, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.291770] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 868.353286] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364968, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.432520] env[62619]: DEBUG nova.compute.manager [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 868.432649] env[62619]: DEBUG nova.compute.manager [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing instance network info cache due to event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 868.433287] env[62619]: DEBUG oslo_concurrency.lockutils [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.433287] env[62619]: DEBUG oslo_concurrency.lockutils [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.433880] env[62619]: DEBUG nova.network.neutron [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 868.501624] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 868.570545] env[62619]: DEBUG nova.network.neutron [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Successfully created port: 65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 868.598631] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364969, 'name': CreateVM_Task, 'duration_secs': 0.847567} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.602176] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 868.604445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.604689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.604958] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 868.605263] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2d04481e-aba6-4058-894a-e8285a5b8f16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.610097] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 868.610097] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ce8ecf-5325-5854-72c4-883f9dacde6e" [ 868.610097] env[62619]: _type = "Task" [ 868.610097] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.617128] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 868.617267] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquired lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 868.617412] env[62619]: DEBUG nova.network.neutron [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 868.624651] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ce8ecf-5325-5854-72c4-883f9dacde6e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.748758] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8580e7da-64d6-472a-8224-eb72124e50c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.760020] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db66cd10-9bcc-43b3-9ca8-e0cdc959cbc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.768692] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364971, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071806} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.792840] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 868.797599] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5539072-fc08-422a-8631-31a5cbe4e39d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.800443] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf40f195-8008-4a50-a905-de3bdab50c4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.828026] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Reconfiguring VM instance instance-00000047 to attach disk [datastore2] 59b960b6-aa41-4409-a899-9829388c3ff2/59b960b6-aa41-4409-a899-9829388c3ff2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 868.829181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 868.830381] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2ebf35-780e-4b34-9f8d-0a74eb2be865 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.834586] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-593eedab-a092-4382-948f-375be4260ead {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.866532] env[62619]: DEBUG nova.compute.provider_tree [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 868.868023] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 868.868023] env[62619]: value = "task-1364972" [ 868.868023] env[62619]: _type = "Task" [ 868.868023] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.874325] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364968, 'name': ReconfigVM_Task, 'duration_secs': 1.010899} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 868.875044] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Reconfigured VM instance instance-00000046 to attach disk [datastore2] ca452ef6-d777-46dd-a313-ae7dd441adca/ca452ef6-d777-46dd-a313-ae7dd441adca.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 868.875632] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5ff3f543-01c3-48a4-8ec7-817a66bc93bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 868.880971] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364972, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.885336] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 868.885336] env[62619]: value = "task-1364973" [ 868.885336] env[62619]: _type = "Task" [ 868.885336] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 868.894776] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364973, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 868.917388] env[62619]: DEBUG nova.network.neutron [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Updated VIF entry in instance network info cache for port 6b6b714f-2980-4b3f-b83a-26862818d2fb. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 868.917901] env[62619]: DEBUG nova.network.neutron [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Updating instance_info_cache with network_info: [{"id": "6b6b714f-2980-4b3f-b83a-26862818d2fb", "address": "fa:16:3e:f1:b4:5e", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b6b714f-29", "ovs_interfaceid": "6b6b714f-2980-4b3f-b83a-26862818d2fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 868.938817] env[62619]: DEBUG nova.network.neutron [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updating instance_info_cache with network_info: [{"id": "4503b37c-ef93-4256-bf3f-6045d5857427", "address": "fa:16:3e:b2:21:33", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4503b37c-ef", "ovs_interfaceid": "4503b37c-ef93-4256-bf3f-6045d5857427", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.121253] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ce8ecf-5325-5854-72c4-883f9dacde6e, 'name': SearchDatastore_Task, 'duration_secs': 0.080195} completed 
successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.121611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.121892] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 869.122148] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.122320] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.122504] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 869.122795] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2feeb928-ffd2-41bf-bcd5-7f7195804919 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.136961] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 869.137651] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 869.139494] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a31779b-0369-48ce-b966-073ae05b0b3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.146961] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 869.146961] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b8d59d-9ba5-b88e-3117-ebfd331c30d9" [ 869.146961] env[62619]: _type = "Task" [ 869.146961] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.155825] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b8d59d-9ba5-b88e-3117-ebfd331c30d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.174147] env[62619]: DEBUG nova.network.neutron [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 869.343832] env[62619]: DEBUG nova.network.neutron [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Updating instance_info_cache with network_info: [{"id": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "address": "fa:16:3e:97:b0:13", "network": {"id": "cb501158-0342-4b85-acd0-a84f20d40980", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1965346461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0a67999ce56460cb5744f79256d1a2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364b7b78-e7", "ovs_interfaceid": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.369543] env[62619]: DEBUG nova.scheduler.client.report [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed for provider 
c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 869.382862] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364972, 'name': ReconfigVM_Task, 'duration_secs': 0.397436} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.383262] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Reconfigured VM instance instance-00000047 to attach disk [datastore2] 59b960b6-aa41-4409-a899-9829388c3ff2/59b960b6-aa41-4409-a899-9829388c3ff2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 869.383919] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-b7b6c8aa-5dc4-4764-932d-9e33136a0582 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.391122] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 869.391122] env[62619]: value = "task-1364974" [ 869.391122] env[62619]: _type = "Task" [ 869.391122] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.397490] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364973, 'name': Rename_Task, 'duration_secs': 0.166227} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.398151] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.398441] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-01e2fe40-65c4-4da9-b1f8-d2210a2d8f36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.403816] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364974, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.406944] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 869.406944] env[62619]: value = "task-1364975" [ 869.406944] env[62619]: _type = "Task" [ 869.406944] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.415426] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364975, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.421069] env[62619]: DEBUG oslo_concurrency.lockutils [req-1aa4c9c1-02f3-4fe2-9925-d8fe6252fa44 req-68fd9d0c-4a9d-4329-8ee9-c68c55e20a93 service nova] Releasing lock "refresh_cache-055a1071-bd4b-4d1b-88c0-7551a07aee9a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.441896] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.515102] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 869.543050] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 869.543414] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 869.543523] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 869.543661] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 869.543816] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 869.543952] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 869.544172] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 869.544347] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 869.544495] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Got 1 possible topologies 
{{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 869.544658] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 869.544885] env[62619]: DEBUG nova.virt.hardware [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 869.545699] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8195553a-2e35-4999-9289-d6f6250c1715 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.562052] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d55f20-e1d0-49a5-90d1-d8fb2a15757b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.656790] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b8d59d-9ba5-b88e-3117-ebfd331c30d9, 'name': SearchDatastore_Task, 'duration_secs': 0.009039} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.657896] env[62619]: DEBUG nova.network.neutron [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updated VIF entry in instance network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 869.658125] env[62619]: DEBUG nova.network.neutron [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 869.660433] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-336cd6cb-6d3e-4d05-983e-f801d3328240 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.666451] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 869.666451] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52759489-da26-3a83-2d72-4ad5361434f4" [ 869.666451] env[62619]: _type = "Task" [ 869.666451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.683723] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52759489-da26-3a83-2d72-4ad5361434f4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.766803] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 869.767963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.767963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.768571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdd595eb-1db3-4889-901f-9dab3581523a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.776263] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 869.776495] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44d72ce1-dfb2-40d4-8db3-365d3dbade5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.845667] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 869.845772] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 869.845875] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleting the datastore file [datastore2] 9fd66533-39ff-401d-81ef-f37eaceb3103 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 869.846373] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 
tempest-InstanceActionsTestJSON-1048141677-project-member] Releasing lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 869.846652] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Instance network_info: |[{"id": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "address": "fa:16:3e:97:b0:13", "network": {"id": "cb501158-0342-4b85-acd0-a84f20d40980", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1965346461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0a67999ce56460cb5744f79256d1a2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364b7b78-e7", "ovs_interfaceid": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 869.846911] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d48fbc11-8195-4b0b-b750-ec64b31f49fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.848917] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:97:b0:13', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '364b7b78-e7ea-45f8-986e-ee77f8b3fd35', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 869.856449] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Creating folder: Project (d0a67999ce56460cb5744f79256d1a2e). Parent ref: group-v290436. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.857644] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-49527232-57a1-40e4-a354-68ef241db855 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.860431] env[62619]: DEBUG nova.compute.manager [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Received event network-vif-plugged-364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.860657] env[62619]: DEBUG oslo_concurrency.lockutils [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] Acquiring lock "f60e0bec-0811-4e91-bc45-b61874846497-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 869.860875] env[62619]: DEBUG oslo_concurrency.lockutils [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] Lock "f60e0bec-0811-4e91-bc45-b61874846497-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.861109] env[62619]: DEBUG oslo_concurrency.lockutils [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] Lock "f60e0bec-0811-4e91-bc45-b61874846497-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.861294] env[62619]: DEBUG nova.compute.manager [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] No waiting events found dispatching network-vif-plugged-364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 869.861464] env[62619]: WARNING nova.compute.manager [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Received unexpected event network-vif-plugged-364b7b78-e7ea-45f8-986e-ee77f8b3fd35 for instance with vm_state building and task_state spawning. [ 869.861624] env[62619]: DEBUG nova.compute.manager [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Received event network-changed-364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 869.861818] env[62619]: DEBUG nova.compute.manager [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Refreshing instance network info cache due to event network-changed-364b7b78-e7ea-45f8-986e-ee77f8b3fd35. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 869.862019] env[62619]: DEBUG oslo_concurrency.lockutils [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] Acquiring lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 869.862163] env[62619]: DEBUG oslo_concurrency.lockutils [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] Acquired lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 869.862317] env[62619]: DEBUG nova.network.neutron [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Refreshing network info cache for port 364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 869.864690] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 869.864690] env[62619]: value = "task-1364977" [ 869.864690] env[62619]: _type = "Task" [ 869.864690] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.873715] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364977, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.875064] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Created folder: Project (d0a67999ce56460cb5744f79256d1a2e) in parent group-v290436. [ 869.875064] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Creating folder: Instances. Parent ref: group-v290488. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 869.875261] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-363c8fa6-b646-4caa-a828-a8748cb43d5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.878815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 869.879339] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 869.881670] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.963s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 869.883468] env[62619]: INFO nova.compute.claims [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 869.887363] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Created folder: Instances in parent group-v290488. [ 869.887586] env[62619]: DEBUG oslo.service.loopingcall [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 869.887766] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 869.887964] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c5668d46-dd71-47f4-a145-fc6b23f55218 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.915048] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364974, 'name': Rename_Task, 'duration_secs': 0.273905} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 869.915254] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 869.915254] env[62619]: value = "task-1364980" [ 869.915254] env[62619]: _type = "Task" [ 869.915254] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.916387] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 869.916704] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2be36254-5b60-46c2-9717-9caeb8189e15 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 869.924016] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364975, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.928993] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364980, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 869.930099] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 869.930099] env[62619]: value = "task-1364981" [ 869.930099] env[62619]: _type = "Task" [ 869.930099] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 869.939149] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364981, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.161172] env[62619]: DEBUG oslo_concurrency.lockutils [req-b50a0bc4-c37f-4bcc-980d-2ddbb1bec75a req-547fb7a8-267c-495d-814a-3a2e24712c27 service nova] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.177350] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52759489-da26-3a83-2d72-4ad5361434f4, 'name': SearchDatastore_Task, 'duration_secs': 0.010175} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.177617] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 870.177883] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 055a1071-bd4b-4d1b-88c0-7551a07aee9a/055a1071-bd4b-4d1b-88c0-7551a07aee9a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 870.178197] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2abd78fa-ba84-48df-b4d6-d92c1313eb7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.184898] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 870.184898] env[62619]: value = "task-1364982" [ 870.184898] env[62619]: _type = "Task" [ 870.184898] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 870.192616] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364982, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.379612] env[62619]: DEBUG oslo_vmware.api [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1364977, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.306154} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.379885] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 870.380086] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 870.380267] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 870.389552] env[62619]: DEBUG nova.compute.utils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 870.391111] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 870.391285] env[62619]: DEBUG nova.network.neutron [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 870.410103] env[62619]: INFO nova.scheduler.client.report [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocations for instance 9fd66533-39ff-401d-81ef-f37eaceb3103 [ 870.425069] env[62619]: DEBUG oslo_vmware.api [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364975, 'name': PowerOnVM_Task, 'duration_secs': 0.970335} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 870.426026] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 870.426026] env[62619]: INFO nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Took 10.46 seconds to spawn the instance on the hypervisor. 
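Nearly every step recorded above — ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task, CopyVirtualDisk_Task, CreateVM_Task, DeleteDatastoreFile_Task, PowerOnVM_Task — follows the same shape: the driver submits a vCenter task and then polls it, which is what produces the repeated "Waiting for the task ... progress is N% ... completed successfully" lines with a duration_secs. The snippet below is only a minimal, self-contained sketch of that polling pattern for illustration; make_fake_task, the dict shape, and the intervals are hypothetical stand-ins and not oslo.vmware's real API or the driver's actual code.

    import itertools
    import time

    def make_fake_task():
        # Hypothetical stand-in for a vCenter task handle. Progress advances
        # 25% per poll so the example terminates quickly; in the log this role
        # is played by the session layer behind _poll_task.
        steps = itertools.count(25, 25)

        def poll():
            progress = min(next(steps), 100)
            state = "success" if progress >= 100 else "running"
            return {"state": state, "progress": progress}

        return poll

    def wait_for_task(poll, interval=0.1, timeout=60.0):
        # Poll until the task reports success or error, mirroring the
        # "progress is N% ... completed successfully" lines above.
        start = time.monotonic()
        while True:
            info = poll()
            if info["state"] == "success":
                return info
            if info["state"] == "error":
                raise RuntimeError("task failed: %s" % info)
            if time.monotonic() - start > timeout:
                raise TimeoutError("gave up waiting for task")
            print("progress is %d%%" % info["progress"])
            time.sleep(interval)

    print("completed successfully:", wait_for_task(make_fake_task()))

Run standalone, the sketch prints a few "progress is N%" lines and then the completed result, which is the same lifecycle each task above goes through between its "Waiting for the task" entry and its "completed successfully" entry.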
[ 870.426157] env[62619]: DEBUG nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 870.426847] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f4f80d4-633d-4feb-9ba5-322e707b55bd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 870.432466] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364980, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.445225] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364981, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.454181] env[62619]: DEBUG nova.policy [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5034ec8e3d3f4cc79e13528e3bf31167', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '578df9b6434d416fbae5f3cf2c33ef1b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 870.466323] env[62619]: DEBUG nova.compute.manager [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Received event network-vif-unplugged-4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.466540] env[62619]: DEBUG oslo_concurrency.lockutils [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] Acquiring lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.466750] env[62619]: DEBUG oslo_concurrency.lockutils [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 870.466918] env[62619]: DEBUG oslo_concurrency.lockutils [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 870.467101] env[62619]: DEBUG 
nova.compute.manager [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] No waiting events found dispatching network-vif-unplugged-4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 870.467276] env[62619]: WARNING nova.compute.manager [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Received unexpected event network-vif-unplugged-4503b37c-ef93-4256-bf3f-6045d5857427 for instance with vm_state shelved and task_state shelving_offloading. [ 870.467438] env[62619]: DEBUG nova.compute.manager [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Received event network-changed-4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 870.467593] env[62619]: DEBUG nova.compute.manager [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Refreshing instance network info cache due to event network-changed-4503b37c-ef93-4256-bf3f-6045d5857427. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 870.467779] env[62619]: DEBUG oslo_concurrency.lockutils [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] Acquiring lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 870.467915] env[62619]: DEBUG oslo_concurrency.lockutils [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] Acquired lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 870.468149] env[62619]: DEBUG nova.network.neutron [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Refreshing network info cache for port 4503b37c-ef93-4256-bf3f-6045d5857427 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 870.626611] env[62619]: DEBUG nova.network.neutron [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Updated VIF entry in instance network info cache for port 364b7b78-e7ea-45f8-986e-ee77f8b3fd35. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 870.626937] env[62619]: DEBUG nova.network.neutron [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Updating instance_info_cache with network_info: [{"id": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "address": "fa:16:3e:97:b0:13", "network": {"id": "cb501158-0342-4b85-acd0-a84f20d40980", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1965346461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0a67999ce56460cb5744f79256d1a2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364b7b78-e7", "ovs_interfaceid": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 870.696239] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364982, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.895113] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 870.921020] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 870.934251] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364980, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.959347] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364981, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 870.960115] env[62619]: INFO nova.compute.manager [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Took 27.79 seconds to build instance. [ 871.112272] env[62619]: DEBUG nova.network.neutron [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Successfully created port: 8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 871.130163] env[62619]: DEBUG oslo_concurrency.lockutils [req-7278222b-1846-4afc-abfb-d54977c5b6f7 req-a46f67e0-3823-4453-97b5-38a94edcb9ce service nova] Releasing lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.198664] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364982, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.988314} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.199247] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 055a1071-bd4b-4d1b-88c0-7551a07aee9a/055a1071-bd4b-4d1b-88c0-7551a07aee9a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 871.199477] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 871.199745] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-efbf5bfe-8989-4424-909d-fc8a2bc39bd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.205299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a9d035-52fd-4d58-bfe6-1506bbaec0ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.215729] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae57b356-519b-459f-9734-78a2ebf9950f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.219627] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 871.219627] env[62619]: value = "task-1364983" [ 871.219627] 
env[62619]: _type = "Task" [ 871.219627] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.252257] env[62619]: DEBUG nova.network.neutron [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Successfully updated port: 65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 871.255016] env[62619]: DEBUG nova.network.neutron [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updated VIF entry in instance network info cache for port 4503b37c-ef93-4256-bf3f-6045d5857427. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 871.255371] env[62619]: DEBUG nova.network.neutron [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updating instance_info_cache with network_info: [{"id": "4503b37c-ef93-4256-bf3f-6045d5857427", "address": "fa:16:3e:b2:21:33", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": null, "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap4503b37c-ef", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 871.257551] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03e2e367-90d5-4aca-b777-50a9acca5236 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.263212] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364983, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.269010] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1506a635-3d56-4105-b1f9-0790f59c0a47 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.284557] env[62619]: DEBUG nova.compute.provider_tree [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 871.431383] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364980, 'name': CreateVM_Task, 'duration_secs': 1.486976} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.431553] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 871.432225] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.432396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.432721] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 871.433017] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-74d6ce0f-0bdf-4c2b-92bb-0954f21b4e00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.440520] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 871.440520] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5247fa7c-8dac-96f1-faf7-ac11ecb8620d" [ 871.440520] env[62619]: _type = "Task" [ 871.440520] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.443722] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364981, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.451150] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5247fa7c-8dac-96f1-faf7-ac11ecb8620d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.465073] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2dc77a45-dce1-49eb-bd74-1bf8d4af02cb tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.879s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.681686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "9fd66533-39ff-401d-81ef-f37eaceb3103" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.731989] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364983, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.100295} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.732286] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 871.733117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27a491eb-fd51-4636-a473-7a3201fe1ea9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.754973] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Reconfiguring VM instance instance-00000048 to attach disk [datastore2] 055a1071-bd4b-4d1b-88c0-7551a07aee9a/055a1071-bd4b-4d1b-88c0-7551a07aee9a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 871.755289] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3143b8e3-a268-472e-ae5d-d674851a724e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.769686] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-1c1b7717-30a9-40c9-913f-6d65a619b94a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.769825] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-1c1b7717-30a9-40c9-913f-6d65a619b94a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.769977] env[62619]: DEBUG nova.network.neutron [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 871.771867] env[62619]: DEBUG oslo_concurrency.lockutils [req-62efab71-453d-4685-a87c-a214495126d6 req-e7727510-ff7b-4f8d-9944-4ecaeeb917d3 service nova] Releasing lock "refresh_cache-9fd66533-39ff-401d-81ef-f37eaceb3103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.779081] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 871.779081] env[62619]: value = "task-1364984" [ 871.779081] env[62619]: _type = "Task" [ 871.779081] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 871.785843] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364984, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 871.787189] env[62619]: DEBUG nova.scheduler.client.report [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 871.878030] env[62619]: DEBUG nova.compute.manager [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Received event network-vif-plugged-65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.878254] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] Acquiring lock "1c1b7717-30a9-40c9-913f-6d65a619b94a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 871.878505] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 871.878628] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 871.878796] env[62619]: DEBUG nova.compute.manager [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] No waiting events found dispatching network-vif-plugged-65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 871.878960] env[62619]: WARNING nova.compute.manager [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Received unexpected event network-vif-plugged-65733022-a28a-4fb7-8d06-564479643fc0 for instance with vm_state building and task_state spawning. 
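Editor's note: the "Acquiring lock ...", "Lock ... acquired ... waited", and "Lock ... \"released\" ... held N.NNNs" DEBUG triples that recur throughout this trace are emitted by oslo.concurrency's lockutils helpers. The minimal Python sketch below shows the two usage forms visible above; the lock names are copied from the log, but the function bodies are illustrative placeholders, not Nova's actual code.

# Sketch of the oslo.concurrency locking pattern behind the lockutils DEBUG lines.
from oslo_concurrency import lockutils

instance_uuid = "9fd66533-39ff-401d-81ef-f37eaceb3103"  # example UUID from the trace

# Context-manager form (logged from lockutils.py:310/313/331 above):
with lockutils.lock("refresh_cache-%s" % instance_uuid):
    pass  # e.g. refresh the instance's network info cache while holding the lock

# Decorator form (logged by the 'inner' wrapper at lockutils.py:402/407/421):
@lockutils.synchronized("compute_resources")
def update_usage():
    pass  # resource-tracker style critical section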
[ 871.879141] env[62619]: DEBUG nova.compute.manager [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Received event network-changed-65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 871.879295] env[62619]: DEBUG nova.compute.manager [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Refreshing instance network info cache due to event network-changed-65733022-a28a-4fb7-8d06-564479643fc0. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 871.879458] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] Acquiring lock "refresh_cache-1c1b7717-30a9-40c9-913f-6d65a619b94a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.906051] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 871.930482] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 871.930809] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 871.930980] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 871.931182] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 871.931330] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e 
tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 871.931477] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 871.931924] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 871.932151] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 871.932345] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 871.932518] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 871.932700] env[62619]: DEBUG nova.virt.hardware [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 871.933569] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea1b9751-7f02-4423-8d84-3ab6011eb494 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.952297] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6049f2c-01d1-4b69-afac-15753d51ac27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.955809] env[62619]: DEBUG oslo_vmware.api [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364981, 'name': PowerOnVM_Task, 'duration_secs': 1.635718} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.956116] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 871.956325] env[62619]: INFO nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Took 9.59 seconds to spawn the instance on the hypervisor. [ 871.956550] env[62619]: DEBUG nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 871.957857] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf4761a-6725-43f3-8f70-402a6304deaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.970561] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 871.973365] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5247fa7c-8dac-96f1-faf7-ac11ecb8620d, 'name': SearchDatastore_Task, 'duration_secs': 0.033453} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 871.974158] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 871.974388] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 871.974630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 871.974780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 871.974954] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 871.975207] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a7fe75f9-a0b6-423e-9434-68b553f66e94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.991730] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 871.992442] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 871.993229] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cfda3b34-e7c7-4bdc-b177-869495cc3535 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 871.997800] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 871.997800] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529f8adb-1d2a-5194-9956-fad28a5a7517" [ 871.997800] env[62619]: _type = "Task" [ 871.997800] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.005787] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529f8adb-1d2a-5194-9956-fad28a5a7517, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.286514] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364984, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.296805] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.415s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 872.297135] env[62619]: DEBUG nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 872.299615] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 13.526s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 872.299836] env[62619]: DEBUG nova.objects.instance [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lazy-loading 'resources' on Instance uuid 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 872.302999] env[62619]: DEBUG nova.network.neutron [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 872.443449] env[62619]: DEBUG nova.network.neutron [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Updating instance_info_cache with network_info: [{"id": "65733022-a28a-4fb7-8d06-564479643fc0", "address": "fa:16:3e:00:25:a7", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65733022-a2", "ovs_interfaceid": "65733022-a28a-4fb7-8d06-564479643fc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 872.487194] env[62619]: INFO nova.compute.manager [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Took 23.39 seconds to build instance. [ 872.494904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 872.508232] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529f8adb-1d2a-5194-9956-fad28a5a7517, 'name': SearchDatastore_Task, 'duration_secs': 0.013186} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 872.509042] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d8982c1e-00a7-4fd9-8a7e-58e8b3605f8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.514452] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 872.514452] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525dbce6-bdaf-a682-39eb-ac7bcd2e26d3" [ 872.514452] env[62619]: _type = "Task" [ 872.514452] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 872.522014] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525dbce6-bdaf-a682-39eb-ac7bcd2e26d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.788068] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364984, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 872.802918] env[62619]: DEBUG nova.compute.utils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 872.804487] env[62619]: DEBUG nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Not allocating networking since 'none' was specified. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 872.945753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-1c1b7717-30a9-40c9-913f-6d65a619b94a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 872.946141] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Instance network_info: |[{"id": "65733022-a28a-4fb7-8d06-564479643fc0", "address": "fa:16:3e:00:25:a7", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65733022-a2", "ovs_interfaceid": "65733022-a28a-4fb7-8d06-564479643fc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 872.946472] env[62619]: DEBUG oslo_concurrency.lockutils 
[req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] Acquired lock "refresh_cache-1c1b7717-30a9-40c9-913f-6d65a619b94a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 872.946659] env[62619]: DEBUG nova.network.neutron [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Refreshing network info cache for port 65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 872.947924] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:00:25:a7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '65733022-a28a-4fb7-8d06-564479643fc0', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 872.958934] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating folder: Project (998daea123aa48b2816d1cbe9e662950). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.963685] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b7c14b4-a10f-4bf7-8a45-69066c421dd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.977437] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Created folder: Project (998daea123aa48b2816d1cbe9e662950) in parent group-v290436. [ 872.977437] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating folder: Instances. Parent ref: group-v290491. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 872.977437] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-356ccb70-2d66-4db5-b522-ad5f627993eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 872.987561] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Created folder: Instances in parent group-v290491. [ 872.987982] env[62619]: DEBUG oslo.service.loopingcall [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 872.988123] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 872.988363] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d2c7c1f1-c637-4506-bde2-8f6d1693b68c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.009847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dec196e6-e3ad-48d8-a7c7-3fc807e2786a tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "59b960b6-aa41-4409-a899-9829388c3ff2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 53.124s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.016088] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 873.016088] env[62619]: value = "task-1364987" [ 873.016088] env[62619]: _type = "Task" [ 873.016088] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.029382] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525dbce6-bdaf-a682-39eb-ac7bcd2e26d3, 'name': SearchDatastore_Task, 'duration_secs': 0.011568} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.032545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 873.033109] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] f60e0bec-0811-4e91-bc45-b61874846497/f60e0bec-0811-4e91-bc45-b61874846497.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 873.033109] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364987, 'name': CreateVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.034220] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-4838a77f-3461-4844-991d-288b02834f80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.036972] env[62619]: DEBUG nova.compute.manager [req-4e4ae0bb-2991-48f5-a01e-658bffae2c35 req-9ecb73b2-5df6-4140-8ef8-0d23459fada6 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-vif-plugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.037200] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e4ae0bb-2991-48f5-a01e-658bffae2c35 req-9ecb73b2-5df6-4140-8ef8-0d23459fada6 service nova] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 873.037414] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e4ae0bb-2991-48f5-a01e-658bffae2c35 req-9ecb73b2-5df6-4140-8ef8-0d23459fada6 service nova] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 873.037581] env[62619]: DEBUG oslo_concurrency.lockutils [req-4e4ae0bb-2991-48f5-a01e-658bffae2c35 req-9ecb73b2-5df6-4140-8ef8-0d23459fada6 service nova] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 873.037756] env[62619]: DEBUG nova.compute.manager [req-4e4ae0bb-2991-48f5-a01e-658bffae2c35 req-9ecb73b2-5df6-4140-8ef8-0d23459fada6 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] No waiting events found dispatching network-vif-plugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 873.037927] env[62619]: WARNING nova.compute.manager [req-4e4ae0bb-2991-48f5-a01e-658bffae2c35 req-9ecb73b2-5df6-4140-8ef8-0d23459fada6 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received unexpected event network-vif-plugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e for instance with vm_state building and task_state spawning. [ 873.045073] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 873.045073] env[62619]: value = "task-1364988" [ 873.045073] env[62619]: _type = "Task" [ 873.045073] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.052899] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364988, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.079229] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33da116a-4355-4b44-858e-5e923a9e86f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.087535] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9537f983-6f21-457a-83fb-1ef89c8b312c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.121314] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-278cfb33-9013-426b-aa53-877fde35b6fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.129364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5451233e-b077-483a-8042-04d3e59b8fc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.143300] env[62619]: DEBUG nova.compute.provider_tree [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 873.289199] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364984, 'name': ReconfigVM_Task, 'duration_secs': 1.070921} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.289706] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Reconfigured VM instance instance-00000048 to attach disk [datastore2] 055a1071-bd4b-4d1b-88c0-7551a07aee9a/055a1071-bd4b-4d1b-88c0-7551a07aee9a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 873.290353] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-aad05caf-826b-4c54-8aa8-de679d077af3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.297605] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 873.297605] env[62619]: value = "task-1364989" [ 873.297605] env[62619]: _type = "Task" [ 873.297605] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.308797] env[62619]: DEBUG nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 873.311969] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364989, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.524956] env[62619]: DEBUG nova.network.neutron [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Successfully updated port: 8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 873.533178] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364987, 'name': CreateVM_Task, 'duration_secs': 0.373806} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.533743] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 873.534111] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.534286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.534611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 873.534879] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fd329e11-9082-4280-b04e-ad75419cab36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.540564] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 873.540564] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525c738d-3170-0623-a0f0-34e927b97ea9" [ 873.540564] env[62619]: _type = "Task" [ 873.540564] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.554983] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525c738d-3170-0623-a0f0-34e927b97ea9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.559856] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364988, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.503141} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.560119] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] f60e0bec-0811-4e91-bc45-b61874846497/f60e0bec-0811-4e91-bc45-b61874846497.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 873.560375] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 873.560658] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a9804473-8229-4393-a9bd-878867800b8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.566569] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 873.566569] env[62619]: value = "task-1364990" [ 873.566569] env[62619]: _type = "Task" [ 873.566569] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.576338] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364990, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.646623] env[62619]: DEBUG nova.scheduler.client.report [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 873.690093] env[62619]: DEBUG nova.network.neutron [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Updated VIF entry in instance network info cache for port 65733022-a28a-4fb7-8d06-564479643fc0. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 873.690467] env[62619]: DEBUG nova.network.neutron [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Updating instance_info_cache with network_info: [{"id": "65733022-a28a-4fb7-8d06-564479643fc0", "address": "fa:16:3e:00:25:a7", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap65733022-a2", "ovs_interfaceid": "65733022-a28a-4fb7-8d06-564479643fc0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 873.809137] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364989, 'name': Rename_Task, 'duration_secs': 0.329506} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 873.809423] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 873.809676] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-10cbede3-488b-4090-ac6a-a17d99baf078 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 873.817031] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 873.817031] env[62619]: value = "task-1364991" [ 873.817031] env[62619]: _type = "Task" [ 873.817031] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 873.824443] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364991, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 873.958221] env[62619]: DEBUG nova.compute.manager [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-changed-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 873.958546] env[62619]: DEBUG nova.compute.manager [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Refreshing instance network info cache due to event network-changed-8c7fa88a-7dbd-49cf-b490-e311fa9a804e. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 873.958695] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 873.958846] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 873.959026] env[62619]: DEBUG nova.network.neutron [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Refreshing network info cache for port 8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 874.030918] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.051458] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525c738d-3170-0623-a0f0-34e927b97ea9, 'name': SearchDatastore_Task, 'duration_secs': 0.014587} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.051776] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.052031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 874.052281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 874.052429] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 874.052622] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 874.052906] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-94740fa9-7532-402b-8262-1caa7b077e3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.062018] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 874.062297] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 874.062948] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c72a3ed5-267c-4ebd-87a0-109b2f5f391e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.068278] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 874.068278] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8dec0-a979-f1dc-dd57-6f35f9886295" [ 874.068278] env[62619]: _type = "Task" [ 874.068278] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.079314] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364990, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.066988} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.082457] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 874.082794] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8dec0-a979-f1dc-dd57-6f35f9886295, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.083670] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29195384-f11c-4834-8b22-b392ba5a5457 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.107863] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Reconfiguring VM instance instance-00000049 to attach disk [datastore2] f60e0bec-0811-4e91-bc45-b61874846497/f60e0bec-0811-4e91-bc45-b61874846497.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 874.108583] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d4f7e0a-2772-42e6-8a0a-52df426a1468 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.130972] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 874.130972] env[62619]: value = "task-1364992" [ 874.130972] env[62619]: _type = "Task" [ 874.130972] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.139530] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364992, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.151790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.155052] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.779s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 874.155192] env[62619]: DEBUG nova.objects.instance [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lazy-loading 'resources' on Instance uuid 61d68c36-5251-4fad-9d3b-125296ae0861 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 874.175864] env[62619]: INFO nova.scheduler.client.report [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Deleted allocations for instance 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a [ 874.193841] env[62619]: DEBUG oslo_concurrency.lockutils [req-0c0f25e2-fff7-4b46-9bf3-58a9ab8ce102 req-ab6829ff-1a92-4543-94c3-1b3fb397a753 service nova] Releasing lock "refresh_cache-1c1b7717-30a9-40c9-913f-6d65a619b94a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 874.318977] env[62619]: DEBUG nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 874.332500] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364991, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.346629] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 874.346972] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 874.347169] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 874.347410] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 874.347652] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 874.347867] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 874.348174] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 874.348386] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 874.348666] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 
tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 874.348910] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 874.349155] env[62619]: DEBUG nova.virt.hardware [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 874.350125] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a650aa9-90af-444f-a971-872607ab16ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.359607] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36c9743c-25de-464a-b0e6-8a2379cd709f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.375845] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 874.381165] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Creating folder: Project (1a9ff5cf13874116b7d90ab4c1a46a59). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.381517] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-56b9dd42-9ba5-403d-bd21-a80dd1d33dbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.392611] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Created folder: Project (1a9ff5cf13874116b7d90ab4c1a46a59) in parent group-v290436. [ 874.392881] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Creating folder: Instances. Parent ref: group-v290494. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 874.393258] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f271fd81-9e58-4128-aad0-cf23613e7aec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.403943] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Created folder: Instances in parent group-v290494. 
[ 874.404237] env[62619]: DEBUG oslo.service.loopingcall [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 874.404459] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 874.404693] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-aaaa29d3-ca31-47ee-bb47-9dc61cdf341c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.428604] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 874.428604] env[62619]: value = "task-1364995" [ 874.428604] env[62619]: _type = "Task" [ 874.428604] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.437326] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364995, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.502778] env[62619]: DEBUG nova.network.neutron [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 874.581867] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8dec0-a979-f1dc-dd57-6f35f9886295, 'name': SearchDatastore_Task, 'duration_secs': 0.017405} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.582652] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7aa0fd31-06c0-4bf2-8d1a-6b87a40f6b6c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.588089] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 874.588089] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52320517-82c2-e237-e4f0-ee224c6f5cc8" [ 874.588089] env[62619]: _type = "Task" [ 874.588089] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.592096] env[62619]: DEBUG nova.network.neutron [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 874.599149] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52320517-82c2-e237-e4f0-ee224c6f5cc8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.641805] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364992, 'name': ReconfigVM_Task, 'duration_secs': 0.352503} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.642169] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Reconfigured VM instance instance-00000049 to attach disk [datastore2] f60e0bec-0811-4e91-bc45-b61874846497/f60e0bec-0811-4e91-bc45-b61874846497.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 874.642795] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f47ec93c-381d-4628-bdce-fe30c42820b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.649275] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 874.649275] env[62619]: value = "task-1364996" [ 874.649275] env[62619]: _type = "Task" [ 874.649275] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 874.656505] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364996, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 874.682934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-793bdc23-ec0e-4740-bedf-578983ba78ed tempest-ServerMetadataTestJSON-580174521 tempest-ServerMetadataTestJSON-580174521-project-member] Lock "29f16f05-fe2f-4c16-ab8c-6fb210bbce8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.042s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 874.832529] env[62619]: DEBUG oslo_vmware.api [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1364991, 'name': PowerOnVM_Task, 'duration_secs': 0.625801} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 874.834976] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 874.835215] env[62619]: INFO nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Took 10.07 seconds to spawn the instance on the hypervisor. [ 874.835401] env[62619]: DEBUG nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 874.836374] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c793374e-e80f-4b4d-9b9b-1066fef18640 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.847329] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53640106-ce3f-454b-8b9a-689357d047e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.854750] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e4fee03-a7b0-4ed1-96af-c2968f74fbee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.888211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee9c411-973d-4c79-9ab0-3012fba32542 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.896557] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7fb254cf-9acd-4a7c-8c48-e9588c1ff1ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.910848] env[62619]: DEBUG nova.compute.provider_tree [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory 
has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 874.937822] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364995, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.094733] env[62619]: DEBUG oslo_concurrency.lockutils [req-d6ddc8a5-0fcc-4925-834b-973b9221256a req-5ef265fc-b050-466e-81bc-804a2a209272 service nova] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.098626] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.098792] env[62619]: DEBUG nova.network.neutron [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 875.099895] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52320517-82c2-e237-e4f0-ee224c6f5cc8, 'name': SearchDatastore_Task, 'duration_secs': 0.048934} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.100153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.100396] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 1c1b7717-30a9-40c9-913f-6d65a619b94a/1c1b7717-30a9-40c9-913f-6d65a619b94a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 875.100872] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0f836da3-2659-4ae4-9aa5-3a5dd623e2a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.107854] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 875.107854] env[62619]: value = "task-1364997" [ 875.107854] env[62619]: _type = "Task" [ 875.107854] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.116242] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1364997, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.160417] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364996, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.359674] env[62619]: INFO nova.compute.manager [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Took 25.76 seconds to build instance. [ 875.413617] env[62619]: DEBUG nova.scheduler.client.report [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 875.440240] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1364995, 'name': CreateVM_Task, 'duration_secs': 0.957637} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.440417] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 875.440844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.441016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.441342] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 875.441605] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfc2ba39-6d95-462b-9954-3bea98bce2eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.446136] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 875.446136] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520d8c53-4343-c4d4-a815-cf545c942804" [ 875.446136] env[62619]: _type = "Task" [ 875.446136] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.453926] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520d8c53-4343-c4d4-a815-cf545c942804, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.617834] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1364997, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.634710] env[62619]: DEBUG nova.network.neutron [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 875.663025] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364996, 'name': Rename_Task, 'duration_secs': 0.951565} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.663025] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 875.663025] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a1c6b8ad-d83c-40ab-8d76-ae173ed2077c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.669683] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 875.669683] env[62619]: value = "task-1364998" [ 875.669683] env[62619]: _type = "Task" [ 875.669683] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 875.679468] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364998, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 875.786068] env[62619]: DEBUG nova.network.neutron [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 875.861588] env[62619]: DEBUG oslo_concurrency.lockutils [None req-09b0be60-fc92-4edf-a1d4-565423e1ec05 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 55.564s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.919335] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.764s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 875.922668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 14.139s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 875.926622] env[62619]: INFO nova.compute.claims [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 875.945997] env[62619]: INFO nova.scheduler.client.report [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Deleted allocations for instance 
61d68c36-5251-4fad-9d3b-125296ae0861 [ 875.971661] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520d8c53-4343-c4d4-a815-cf545c942804, 'name': SearchDatastore_Task, 'duration_secs': 0.0126} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 875.975359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 875.975629] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 875.975882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 875.976046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 875.976970] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 875.976970] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fe4330b6-ca39-42fb-b5b5-f7797b60d9b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.987409] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 875.987662] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 875.989271] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e4ae41da-a8f9-4618-a673-f1b6412b17be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.996026] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 875.996026] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52676938-4a00-1afa-7381-32dda74c97cc" [ 875.996026] env[62619]: _type = "Task" [ 875.996026] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.004967] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52676938-4a00-1afa-7381-32dda74c97cc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.120371] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1364997, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.179580] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364998, 'name': PowerOnVM_Task} progress is 37%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.289214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 876.289553] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance network_info: |[{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 876.290017] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:a8:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8c7fa88a-7dbd-49cf-b490-e311fa9a804e', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 876.298588] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating folder: Project (578df9b6434d416fbae5f3cf2c33ef1b). Parent ref: group-v290436. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.299184] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-931b278c-0381-48df-af60-9458be0b4d41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.313262] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Created folder: Project (578df9b6434d416fbae5f3cf2c33ef1b) in parent group-v290436. [ 876.313262] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating folder: Instances. Parent ref: group-v290497. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 876.313262] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f16b02a1-cff5-4a5a-83b9-a9326c911aab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.320147] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Created folder: Instances in parent group-v290497. [ 876.320393] env[62619]: DEBUG oslo.service.loopingcall [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 876.320592] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 876.320835] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f152d35d-f5ff-4dde-a039-643213c14fc9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.340353] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 876.340353] env[62619]: value = "task-1365001" [ 876.340353] env[62619]: _type = "Task" [ 876.340353] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.348782] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365001, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.468491] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4806ba4f-cc37-4945-a696-4794d1882668 tempest-ServersTestMultiNic-642164647 tempest-ServersTestMultiNic-642164647-project-member] Lock "61d68c36-5251-4fad-9d3b-125296ae0861" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 19.232s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 876.509926] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52676938-4a00-1afa-7381-32dda74c97cc, 'name': SearchDatastore_Task, 'duration_secs': 0.01458} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 876.510775] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-622f0d57-85d2-4712-81b8-c4ca161986e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 876.517020] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 876.517020] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52592cbc-e794-1ee9-42c3-d83d93eb5ddb" [ 876.517020] env[62619]: _type = "Task" [ 876.517020] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 876.528073] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52592cbc-e794-1ee9-42c3-d83d93eb5ddb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.619644] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1364997, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.681866] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364998, 'name': PowerOnVM_Task} progress is 82%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 876.857404] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365001, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.035462] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52592cbc-e794-1ee9-42c3-d83d93eb5ddb, 'name': SearchDatastore_Task, 'duration_secs': 0.018413} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.035742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.036022] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] fa4e9947-5b99-4447-9535-6dbcaba635f8/fa4e9947-5b99-4447-9535-6dbcaba635f8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 877.036305] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72e52292-39cc-4b1b-9dd4-c84b4321fd79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.048134] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 877.048134] env[62619]: value = "task-1365002" [ 877.048134] env[62619]: _type = "Task" [ 877.048134] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.060549] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365002, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.121964] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1364997, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.7597} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.122413] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 1c1b7717-30a9-40c9-913f-6d65a619b94a/1c1b7717-30a9-40c9-913f-6d65a619b94a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 877.122593] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 877.123313] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c259f329-4fa0-4bb6-b8b5-893f457b783c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.132174] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 877.132174] env[62619]: value = "task-1365003" [ 877.132174] env[62619]: _type = "Task" [ 877.132174] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.142979] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365003, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.182361] env[62619]: DEBUG oslo_vmware.api [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1364998, 'name': PowerOnVM_Task, 'duration_secs': 1.406471} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.183310] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 877.183310] env[62619]: INFO nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Took 10.01 seconds to spawn the instance on the hypervisor. 
[ 877.183310] env[62619]: DEBUG nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 877.183946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f64b257-0288-442f-a679-a690b02a4c41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.202261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c2652b-98fc-4378-a391-107f706f8710 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.209552] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d543fb4-c7a8-4d12-a276-adec2dd57688 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.243452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cb52d7a-7deb-4368-a75b-bd31f6b1cbe9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.253139] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-572c3478-efbc-45ff-85e0-b7ba225696d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.271189] env[62619]: DEBUG nova.compute.provider_tree [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 877.354486] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365001, 'name': CreateVM_Task, 'duration_secs': 0.686307} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.354732] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 877.355740] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.355922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.356442] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 877.356835] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-27fb8587-7764-404d-b0f7-42ffc37c8cdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.362514] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 877.362514] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520eb6bb-01d2-1dd7-4f91-1b2962c12d7a" [ 877.362514] env[62619]: _type = "Task" [ 877.362514] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.371372] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520eb6bb-01d2-1dd7-4f91-1b2962c12d7a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.563266] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365002, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.643403] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365003, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.06391} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.643695] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 877.645051] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ffe2776-2764-46ba-9c63-398a059477ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.667600] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Reconfiguring VM instance instance-0000004a to attach disk [datastore1] 1c1b7717-30a9-40c9-913f-6d65a619b94a/1c1b7717-30a9-40c9-913f-6d65a619b94a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 877.667940] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f237a4cb-d72d-4944-92d4-373837434c2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.688528] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 877.688528] env[62619]: value = "task-1365004" [ 877.688528] env[62619]: _type = "Task" [ 877.688528] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.697135] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365004, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 877.704776] env[62619]: INFO nova.compute.manager [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Took 22.78 seconds to build instance. 
[ 877.777061] env[62619]: DEBUG nova.scheduler.client.report [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 877.873188] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520eb6bb-01d2-1dd7-4f91-1b2962c12d7a, 'name': SearchDatastore_Task, 'duration_secs': 0.065197} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 877.873508] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 877.873796] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 877.873989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 877.874155] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 877.874335] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 877.874596] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3c35a166-56bc-4814-b13c-6ccae8294d3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.884344] env[62619]: DEBUG 
nova.virt.vmwareapi.ds_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 877.884534] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 877.885265] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77531010-bdde-4a49-9812-77c58c335769 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 877.890244] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 877.890244] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b079e6-5ee9-bf5e-0922-88e92a2f18b6" [ 877.890244] env[62619]: _type = "Task" [ 877.890244] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 877.897923] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b079e6-5ee9-bf5e-0922-88e92a2f18b6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.065064] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365002, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.558448} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.065380] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] fa4e9947-5b99-4447-9535-6dbcaba635f8/fa4e9947-5b99-4447-9535-6dbcaba635f8.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 878.065648] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 878.065942] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5e004d48-54eb-47f8-b4e5-0bfff4a4cd40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.075976] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 878.075976] env[62619]: value = "task-1365005" [ 878.075976] env[62619]: _type = "Task" [ 878.075976] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.085786] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365005, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.198642] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365004, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.207302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cc93b19-168c-4f34-933f-34166d9d1000 tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.673s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.279456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.357s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 878.279987] env[62619]: DEBUG nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 878.282971] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 16.226s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.284658] env[62619]: INFO nova.compute.claims [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 878.406317] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b079e6-5ee9-bf5e-0922-88e92a2f18b6, 'name': SearchDatastore_Task, 'duration_secs': 0.014556} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.407857] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b765308-3250-47c7-9e8f-78c7e260ff63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.415424] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 878.415424] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521407ee-3893-701a-e2f4-674e2638c86d" [ 878.415424] env[62619]: _type = "Task" [ 878.415424] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.424028] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521407ee-3893-701a-e2f4-674e2638c86d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.541441] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "f60e0bec-0811-4e91-bc45-b61874846497" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.543048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497" acquired by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.543048] env[62619]: INFO nova.compute.manager [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Rebooting instance [ 878.587744] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365005, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.286462} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.591145] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 878.593183] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc5268d4-eca3-4e69-b757-da12b113b412 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.618332] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Reconfiguring VM instance instance-0000004c to attach disk [datastore1] fa4e9947-5b99-4447-9535-6dbcaba635f8/fa4e9947-5b99-4447-9535-6dbcaba635f8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 878.619013] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-cc8d011e-438d-4447-aeee-c8f268de2f9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.639648] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 878.639648] env[62619]: value = "task-1365006" [ 878.639648] env[62619]: _type = "Task" [ 878.639648] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.647909] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365006, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.700299] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365004, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 878.793775] env[62619]: DEBUG nova.compute.utils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 878.795880] env[62619]: DEBUG nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Not allocating networking since 'none' was specified. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1977}} [ 878.859178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "ca452ef6-d777-46dd-a313-ae7dd441adca" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 878.859462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 878.859646] env[62619]: DEBUG nova.compute.manager [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 878.860766] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3e7aaf-84bf-4476-bfd0-8526f9c2e0ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.867473] env[62619]: DEBUG nova.compute.manager [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 878.868284] env[62619]: DEBUG nova.objects.instance [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lazy-loading 'flavor' on Instance uuid ca452ef6-d777-46dd-a313-ae7dd441adca {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 878.924977] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521407ee-3893-701a-e2f4-674e2638c86d, 'name': SearchDatastore_Task, 'duration_secs': 0.017398} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 878.926122] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 878.926122] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 878.926122] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d1be0938-5ed5-4d7c-a3c1-05e6bb98efca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 878.932399] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 878.932399] env[62619]: value = "task-1365007" [ 878.932399] env[62619]: _type = "Task" [ 878.932399] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 878.940275] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365007, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.067205] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 879.067413] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquired lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 879.067656] env[62619]: DEBUG nova.network.neutron [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 879.150236] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365006, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.200638] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365004, 'name': ReconfigVM_Task, 'duration_secs': 1.065573} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.201103] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Reconfigured VM instance instance-0000004a to attach disk [datastore1] 1c1b7717-30a9-40c9-913f-6d65a619b94a/1c1b7717-30a9-40c9-913f-6d65a619b94a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.201957] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6028c6de-b0e9-4be1-b281-237b305b6447 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.209595] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 879.209595] env[62619]: value = "task-1365008" [ 879.209595] env[62619]: _type = "Task" [ 879.209595] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.218600] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365008, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.299894] env[62619]: DEBUG nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 879.373557] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 879.376732] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-fc16fd3b-0f31-42c1-af40-f346c6a40b93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.384538] env[62619]: DEBUG oslo_vmware.api [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 879.384538] env[62619]: value = "task-1365009" [ 879.384538] env[62619]: _type = "Task" [ 879.384538] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.394379] env[62619]: DEBUG oslo_vmware.api [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365009, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.445454] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365007, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.526667] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae5a9323-dbaa-4e08-8b11-7ab3a3c93719 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.537866] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-216f3c71-38a7-4de6-93d9-c8bb6e674508 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.573350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-849a8f93-76ed-43a4-b59f-746eaf16780b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.582767] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d95dbd5e-409f-4023-8cfa-a1dd1dd65b39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.600294] env[62619]: DEBUG nova.compute.provider_tree [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 879.652963] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365006, 'name': ReconfigVM_Task, 'duration_secs': 0.733625} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.653439] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Reconfigured VM instance instance-0000004c to attach disk [datastore1] fa4e9947-5b99-4447-9535-6dbcaba635f8/fa4e9947-5b99-4447-9535-6dbcaba635f8.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 879.654602] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-49747ce0-f812-4daf-9434-905969dbbfc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.661230] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 879.661230] env[62619]: value = "task-1365010" [ 879.661230] env[62619]: _type = "Task" [ 879.661230] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.674068] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365010, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.719450] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365008, 'name': Rename_Task, 'duration_secs': 0.225815} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.719826] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 879.720128] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7a23cd98-0c9b-47ff-8c72-a91e0ef7ec32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.728267] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 879.728267] env[62619]: value = "task-1365011" [ 879.728267] env[62619]: _type = "Task" [ 879.728267] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.739375] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365011, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 879.853614] env[62619]: DEBUG nova.network.neutron [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Updating instance_info_cache with network_info: [{"id": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "address": "fa:16:3e:97:b0:13", "network": {"id": "cb501158-0342-4b85-acd0-a84f20d40980", "bridge": "br-int", "label": "tempest-InstanceActionsTestJSON-1965346461-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "d0a67999ce56460cb5744f79256d1a2e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7d689fd7-f53e-4fd3-80d9-8d6b8fb7a164", "external-id": "nsx-vlan-transportzone-972", "segmentation_id": 972, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap364b7b78-e7", "ovs_interfaceid": "364b7b78-e7ea-45f8-986e-ee77f8b3fd35", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 879.895088] env[62619]: DEBUG oslo_vmware.api [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 
tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365009, 'name': PowerOffVM_Task, 'duration_secs': 0.286954} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.895524] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 879.895606] env[62619]: DEBUG nova.compute.manager [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 879.896402] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8846b8c-5ad2-45ba-b5c0-10dadc5657c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.944817] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365007, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.94757} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 879.945159] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 879.945496] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 879.945682] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b1787a1f-d265-467b-8310-fdffc38a6f6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 879.953330] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 879.953330] env[62619]: value = "task-1365012" [ 879.953330] env[62619]: _type = "Task" [ 879.953330] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 879.961626] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365012, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.104629] env[62619]: DEBUG nova.scheduler.client.report [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 880.172749] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365010, 'name': Rename_Task, 'duration_secs': 0.261129} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.173067] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 880.173362] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3fcf90c1-27fb-4429-8584-a093a41735f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.179953] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 880.179953] env[62619]: value = "task-1365013" [ 880.179953] env[62619]: _type = "Task" [ 880.179953] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.187771] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365013, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.238093] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365011, 'name': PowerOnVM_Task} progress is 71%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.313351] env[62619]: DEBUG nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 880.339843] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 880.340150] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 880.340349] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 880.340617] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 880.340795] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 880.341009] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 880.341456] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 880.341456] env[62619]: DEBUG nova.virt.hardware [None 
req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 880.341655] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 880.341832] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 880.342316] env[62619]: DEBUG nova.virt.hardware [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 880.343052] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccb6f6df-3ce1-4812-b607-8e8c333c7009 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.351905] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fed70eaf-31a3-47e0-9105-f7a7200feccc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.357527] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Releasing lock "refresh_cache-f60e0bec-0811-4e91-bc45-b61874846497" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 880.368043] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 880.373464] env[62619]: DEBUG oslo.service.loopingcall [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 880.374012] env[62619]: DEBUG nova.compute.manager [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 880.374296] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 880.374994] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ed38f57-63cb-4d99-9e72-2e01475e01bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.377454] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-dc6ed597-5f53-4eef-9245-61807f94d008 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.398146] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 880.398146] env[62619]: value = "task-1365014" [ 880.398146] env[62619]: _type = "Task" [ 880.398146] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.407963] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365014, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.408471] env[62619]: DEBUG oslo_concurrency.lockutils [None req-15d101bb-e2ae-4093-bfe4-a50fd3930024 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.549s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.462969] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365012, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.316804} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 880.463253] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 880.464042] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa4786cb-48d0-4e13-81f5-4f0f76a64ce1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.485867] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 880.486112] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5fb8189b-0f79-4849-8af7-5d5f7cbf0742 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 880.504648] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 880.504648] env[62619]: value = "task-1365015" [ 880.504648] env[62619]: _type = "Task" [ 880.504648] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 880.513115] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365015, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.609956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.327s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 880.610661] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 880.613673] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 15.968s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 880.613974] env[62619]: DEBUG nova.objects.instance [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lazy-loading 'resources' on Instance uuid cfa74201-783f-4ef4-8860-e2f53e4dfb81 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 880.691691] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365013, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.738761] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365011, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 880.912327] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365014, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.015495] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365015, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.117195] env[62619]: DEBUG nova.compute.utils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 881.118720] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 881.118831] env[62619]: DEBUG nova.network.neutron [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 881.186556] env[62619]: DEBUG nova.policy [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1ab4be10d444359a7a3b245ec9b9ea0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c583f4e7b29743aabd3e96f7c53fa04f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 881.195570] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365013, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.241130] env[62619]: DEBUG oslo_vmware.api [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365011, 'name': PowerOnVM_Task, 'duration_secs': 1.191513} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.241657] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.242318] env[62619]: INFO nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Took 11.73 seconds to spawn the instance on the hypervisor. 
[ 881.242650] env[62619]: DEBUG nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 881.243855] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b32698ed-d1c7-4485-ad0d-6c6767a4fb46 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.353361] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0219b5d4-5d6c-4f01-9d67-3ab200c82bd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.365144] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-19e7ef5e-54ed-4011-a3bd-eae3c1723012 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.418026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35eee2b4-3509-4af6-9b77-3fe1758873f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.418026] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18390f46-3759-4e73-a0a5-3caee67090ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.428854] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365014, 'name': CreateVM_Task, 'duration_secs': 0.564687} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.435606] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 881.436902] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Doing hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1063}} [ 881.437831] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.438276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.438703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 881.441019] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ResetVM_Task with opID=oslo.vmware-44daa84c-248e-45ab-b2a4-9cf73182122f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.443058] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a2f9f63-a556-4dc0-9962-3d4d9844c35c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.449450] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-def0ab4c-6386-44a3-8e3c-e0ebc9e1aab2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.455397] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 881.455397] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5249e2d5-4d62-b835-b7ae-0c36b93a1f43" [ 881.455397] env[62619]: _type = "Task" [ 881.455397] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.465039] env[62619]: DEBUG nova.compute.provider_tree [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 881.466252] env[62619]: DEBUG oslo_vmware.api [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 881.466252] env[62619]: value = "task-1365016" [ 881.466252] env[62619]: _type = "Task" [ 881.466252] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.469019] env[62619]: DEBUG nova.network.neutron [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Successfully created port: a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 881.476970] env[62619]: DEBUG nova.objects.instance [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lazy-loading 'flavor' on Instance uuid ca452ef6-d777-46dd-a313-ae7dd441adca {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.482550] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5249e2d5-4d62-b835-b7ae-0c36b93a1f43, 'name': SearchDatastore_Task, 'duration_secs': 0.013233} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.483213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 881.483615] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 881.483970] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.484294] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.484597] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 881.488167] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e1016665-f5ae-416d-9226-9f86f7a1ff3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.490513] env[62619]: DEBUG oslo_vmware.api [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1365016, 'name': ResetVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.499041] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 881.499041] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 881.499905] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-946e5183-521a-4fe0-905c-1e17daedafab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.506583] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 881.506583] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526a1ecf-1b18-3836-b13f-308d6dc7138e" [ 881.506583] env[62619]: _type = "Task" [ 881.506583] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.521106] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365015, 'name': ReconfigVM_Task, 'duration_secs': 0.856582} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.521349] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526a1ecf-1b18-3836-b13f-308d6dc7138e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.521723] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 881.522294] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6a16f991-3f18-4f78-a173-08dd489d182f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.528672] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 881.528672] env[62619]: value = "task-1365017" [ 881.528672] env[62619]: _type = "Task" [ 881.528672] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 881.536799] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365017, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 881.625037] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 881.690881] env[62619]: DEBUG oslo_vmware.api [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365013, 'name': PowerOnVM_Task, 'duration_secs': 1.121901} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.691089] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 881.691416] env[62619]: INFO nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Took 7.37 seconds to spawn the instance on the hypervisor. [ 881.691462] env[62619]: DEBUG nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 881.692480] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa18adc0-f134-415b-890a-cbea0ddd4732 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 881.762643] env[62619]: INFO nova.compute.manager [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Took 25.65 seconds to build instance. [ 881.974021] env[62619]: DEBUG nova.scheduler.client.report [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 881.989487] env[62619]: DEBUG oslo_vmware.api [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1365016, 'name': ResetVM_Task, 'duration_secs': 0.104587} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 881.989655] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 881.989911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquired lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 881.991178] env[62619]: DEBUG nova.network.neutron [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 881.991178] env[62619]: DEBUG nova.objects.instance [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lazy-loading 'info_cache' on Instance uuid ca452ef6-d777-46dd-a313-ae7dd441adca {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 881.995215] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Did hard reboot of VM {{(pid=62619) reboot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1067}} [ 881.995215] env[62619]: DEBUG nova.compute.manager [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 881.995215] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4a35835-f451-464b-9a38-d869535d5bb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.023478] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526a1ecf-1b18-3836-b13f-308d6dc7138e, 'name': SearchDatastore_Task, 'duration_secs': 0.020677} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.024527] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-294ca634-c4a8-46b6-87ed-193053197dc0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.030454] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 882.030454] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521e8ed2-d7fe-fde1-2541-f0402b1412f6" [ 882.030454] env[62619]: _type = "Task" [ 882.030454] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.045176] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365017, 'name': Rename_Task, 'duration_secs': 0.179014} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.049337] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 882.049880] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521e8ed2-d7fe-fde1-2541-f0402b1412f6, 'name': SearchDatastore_Task, 'duration_secs': 0.009663} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 882.050251] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9f6b9919-83bd-4c35-bc76-c53b679da374 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.051781] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 882.052070] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 882.052335] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7ea01a76-92ef-4b7a-b7e7-4a9e6a7dcd97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.058329] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 882.058329] env[62619]: value = "task-1365018" [ 882.058329] env[62619]: _type = "Task" [ 882.058329] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.059573] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 882.059573] env[62619]: value = "task-1365019" [ 882.059573] env[62619]: _type = "Task" [ 882.059573] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 882.072720] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365018, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.076021] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365019, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.220082] env[62619]: INFO nova.compute.manager [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Took 24.32 seconds to build instance. 
[ 882.265272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4302101a-8cb5-46bd-8f5a-4a820531e058 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 57.740s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.480955] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.867s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.484216] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 15.731s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.484216] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.484216] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 882.484630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 13.655s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 882.486028] env[62619]: INFO nova.compute.claims [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 882.491511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da70a3e-1615-4d60-9a20-6c609cd68d6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.494498] env[62619]: DEBUG nova.objects.base [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Object Instance lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 882.501441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc77b0a7-f55f-44f9-b145-47ed06d6ce5b {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.520263] env[62619]: INFO nova.scheduler.client.report [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Deleted allocations for instance cfa74201-783f-4ef4-8860-e2f53e4dfb81 [ 882.522626] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44265b45-4612-4a9e-8939-06d91740f8ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.528359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4b6f7fb8-a767-4599-b3af-c79510db42ac tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497" "released" by "nova.compute.manager.ComputeManager.reboot_instance..do_reboot_instance" :: held 3.986s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 882.536411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d092ced6-be73-417d-81ab-8f8f09330046 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.576024] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181464MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 882.576024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 882.599146] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365019, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.600336] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365018, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 882.636783] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 882.673874] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 882.674140] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 882.674305] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 882.674486] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 882.674958] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 882.674958] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 882.674958] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 882.675645] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 882.675857] 
env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 882.676143] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 882.676222] env[62619]: DEBUG nova.virt.hardware [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 882.677410] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-761479e8-f975-40c7-bf05-fb608b5728ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.686661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15c21749-b26c-4209-ab8c-9def61004607 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 882.725127] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5b4c783e-9300-4e26-a8e5-8da5e416e0b6 tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "fa4e9947-5b99-4447-9535-6dbcaba635f8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 41.263s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.039678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-719d7676-cbdf-4bb1-a40b-ba50b2e0d24d tempest-InstanceActionsV221TestJSON-19957774 tempest-InstanceActionsV221TestJSON-19957774-project-member] Lock "cfa74201-783f-4ef4-8860-e2f53e4dfb81" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 21.586s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.064922] env[62619]: DEBUG nova.network.neutron [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Successfully updated port: a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 883.089256] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365019, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.576776} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.093555] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 883.094130] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 883.094953] env[62619]: DEBUG oslo_vmware.api [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365018, 'name': PowerOnVM_Task, 'duration_secs': 0.548521} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.095328] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-b896b55c-84d8-4e68-afe6-2d3eaacc7977 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.097594] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 883.097925] env[62619]: INFO nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Took 11.19 seconds to spawn the instance on the hypervisor. [ 883.098242] env[62619]: DEBUG nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 883.100019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c473b5af-725f-4beb-95a3-7cccde9e1301 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.113384] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 883.113384] env[62619]: value = "task-1365020" [ 883.113384] env[62619]: _type = "Task" [ 883.113384] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.126112] env[62619]: DEBUG nova.compute.manager [req-e4bacfdc-e91f-4f6c-a3aa-989a7c9400fd req-b1684bb9-76d5-46e0-8905-1bb2b6b59606 service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Received event network-vif-plugged-a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 883.126112] env[62619]: DEBUG oslo_concurrency.lockutils [req-e4bacfdc-e91f-4f6c-a3aa-989a7c9400fd req-b1684bb9-76d5-46e0-8905-1bb2b6b59606 service nova] Acquiring lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.126112] env[62619]: DEBUG oslo_concurrency.lockutils [req-e4bacfdc-e91f-4f6c-a3aa-989a7c9400fd req-b1684bb9-76d5-46e0-8905-1bb2b6b59606 service nova] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.126112] env[62619]: DEBUG oslo_concurrency.lockutils [req-e4bacfdc-e91f-4f6c-a3aa-989a7c9400fd req-b1684bb9-76d5-46e0-8905-1bb2b6b59606 service nova] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.126112] env[62619]: DEBUG nova.compute.manager [req-e4bacfdc-e91f-4f6c-a3aa-989a7c9400fd req-b1684bb9-76d5-46e0-8905-1bb2b6b59606 service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] No waiting events found dispatching network-vif-plugged-a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 883.126297] env[62619]: WARNING nova.compute.manager [req-e4bacfdc-e91f-4f6c-a3aa-989a7c9400fd req-b1684bb9-76d5-46e0-8905-1bb2b6b59606 service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Received unexpected event network-vif-plugged-a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff for instance with vm_state building and task_state spawning. [ 883.135429] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365020, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.306373] env[62619]: DEBUG nova.network.neutron [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Updating instance_info_cache with network_info: [{"id": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "address": "fa:16:3e:cc:c5:14", "network": {"id": "6633af1e-84fa-48b0-a6ec-6e3f85a9dbcb", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-401212399-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c226b0d107bd4abe8cd97f79bb2cc02a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e27fd35-1d7b-4358-92d5-4d34da27b992", "external-id": "nsx-vlan-transportzone-355", "segmentation_id": 355, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6ecf8988-6c", "ovs_interfaceid": "6ecf8988-6ce6-4b80-b927-57c2ef3a8100", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 883.410628] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "f60e0bec-0811-4e91-bc45-b61874846497" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.414472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.414472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "f60e0bec-0811-4e91-bc45-b61874846497-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 883.414472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 883.414472] env[62619]: 
DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 883.416876] env[62619]: INFO nova.compute.manager [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Terminating instance [ 883.425039] env[62619]: DEBUG nova.compute.manager [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 883.425039] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 883.425039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfcf5b7-fd74-4a15-9da9-5df588b18eac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.434017] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 883.434017] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d2ac41dd-058a-4030-94e5-10db187185c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.440267] env[62619]: DEBUG oslo_vmware.api [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 883.440267] env[62619]: value = "task-1365021" [ 883.440267] env[62619]: _type = "Task" [ 883.440267] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.449148] env[62619]: DEBUG oslo_vmware.api [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1365021, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.570039] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 883.570039] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 883.570238] env[62619]: DEBUG nova.network.neutron [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 883.626847] env[62619]: INFO nova.compute.manager [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Took 26.07 seconds to build instance. [ 883.633941] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365020, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068968} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.637024] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 883.638237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c723d31-3c92-4d72-a446-10e585ef9ee6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.662663] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 883.669103] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bf9f01ad-cff7-475a-ba3e-d23bf31c73d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.691179] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 883.691179] env[62619]: value = "task-1365022" [ 883.691179] env[62619]: _type = "Task" [ 883.691179] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 883.703031] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365022, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 883.768142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a6dbe48-5db6-450f-9017-be0b95ebc2b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.777605] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-768774b8-35e2-4e15-ae02-ad14da7d590d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.813875] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Releasing lock "refresh_cache-ca452ef6-d777-46dd-a313-ae7dd441adca" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 883.817624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3a1a8b6-82de-4310-a458-53b62ebfe15f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.826549] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0252c1a-cb49-4fca-b178-f5e3b9b33659 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 883.846772] env[62619]: DEBUG nova.compute.provider_tree [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 883.955090] env[62619]: DEBUG oslo_vmware.api [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1365021, 'name': PowerOffVM_Task, 'duration_secs': 0.420664} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 883.955090] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 883.955090] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 883.955090] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bb33033-09ed-45ca-b9d3-699deca181ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.019239] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 884.022715] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 884.022715] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Deleting the datastore file [datastore2] f60e0bec-0811-4e91-bc45-b61874846497 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 884.022715] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-91dacdd2-299a-4043-ac4e-29d60b3253d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.027114] env[62619]: DEBUG oslo_vmware.api [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for the task: (returnval){ [ 884.027114] env[62619]: value = "task-1365024" [ 884.027114] env[62619]: _type = "Task" [ 884.027114] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.035365] env[62619]: DEBUG oslo_vmware.api [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1365024, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.108325] env[62619]: DEBUG nova.network.neutron [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 884.129811] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f4b8d45d-f795-4905-8da7-7a052ff2d20e tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 51.604s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 884.200432] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365022, 'name': ReconfigVM_Task, 'duration_secs': 0.422048} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.200694] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Reconfigured VM instance instance-0000004d to attach disk [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 884.201396] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-186e7759-9546-4d70-a449-86d120739317 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.207714] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 884.207714] env[62619]: value = "task-1365025" [ 884.207714] env[62619]: _type = "Task" [ 884.207714] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.218781] env[62619]: DEBUG nova.compute.manager [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 884.219106] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365025, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.223163] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78d682e7-3668-449d-a483-f23d6d97c70b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.324035] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.324035] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15b50bfe-5830-4e32-a365-3a8d13d4ae02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.329550] env[62619]: DEBUG oslo_vmware.api [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 884.329550] env[62619]: value = "task-1365026" [ 884.329550] env[62619]: _type = "Task" [ 884.329550] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.343059] env[62619]: DEBUG oslo_vmware.api [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365026, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.366859] env[62619]: ERROR nova.scheduler.client.report [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [req-6fa5657a-69e8-4000-9d6b-851fc3e50ca5] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-6fa5657a-69e8-4000-9d6b-851fc3e50ca5"}]} [ 884.373456] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.373739] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.384981] env[62619]: DEBUG nova.scheduler.client.report [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 884.404495] env[62619]: DEBUG nova.scheduler.client.report [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 884.404863] env[62619]: DEBUG nova.compute.provider_tree [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.422086] env[62619]: DEBUG nova.scheduler.client.report [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 
884.449806] env[62619]: DEBUG nova.scheduler.client.report [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 884.465539] env[62619]: DEBUG nova.network.neutron [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [{"id": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "address": "fa:16:3e:d5:c3:bc", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0389ef1-cf", "ovs_interfaceid": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 884.539069] env[62619]: DEBUG oslo_vmware.api [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Task: {'id': task-1365024, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.318911} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.539354] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 884.540106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 884.540106] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 884.540106] env[62619]: INFO nova.compute.manager [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Took 1.12 seconds to destroy the instance on the hypervisor. [ 884.540274] env[62619]: DEBUG oslo.service.loopingcall [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.540336] env[62619]: DEBUG nova.compute.manager [-] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 884.540426] env[62619]: DEBUG nova.network.neutron [-] [instance: f60e0bec-0811-4e91-bc45-b61874846497] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 884.653571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2088a1d5-e844-48e0-b855-5b267be65191 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.661375] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c42564d-0b56-4273-8bba-30bab9dbce06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.693169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e7293e0-5e9a-47bb-a1e3-1ee368589fe4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.701251] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bb74f18-0189-4e3e-872e-509478d7585a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.716878] env[62619]: DEBUG nova.compute.provider_tree [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 884.727552] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365025, 'name': Rename_Task, 'duration_secs': 0.273586} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 884.727829] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 884.728094] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dff647aa-c8ca-4f3c-bfa6-61c577e35822 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.735894] env[62619]: INFO nova.compute.manager [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] instance snapshotting [ 884.738198] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 884.738198] env[62619]: value = "task-1365027" [ 884.738198] env[62619]: _type = "Task" [ 884.738198] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 884.738921] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990d6a88-833d-4a1a-a2cf-d9860dae6de2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.764147] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-32fd7336-274a-4dea-b2fb-8c7253f26ee2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.766739] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365027, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.794602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "85e279da-e067-46f8-929b-87a013c4e7f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 884.794922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "85e279da-e067-46f8-929b-87a013c4e7f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 884.840069] env[62619]: DEBUG oslo_vmware.api [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365026, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 884.876891] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 884.968463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 884.968816] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Instance network_info: |[{"id": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "address": "fa:16:3e:d5:c3:bc", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0389ef1-cf", "ovs_interfaceid": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 884.969320] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:d5:c3:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 884.977828] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Creating folder: Project (c583f4e7b29743aabd3e96f7c53fa04f). Parent ref: group-v290436. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.978752] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6d83fdcb-5874-4776-bab1-c70a73790fcc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.989027] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Created folder: Project (c583f4e7b29743aabd3e96f7c53fa04f) in parent group-v290436. [ 884.989201] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Creating folder: Instances. Parent ref: group-v290501. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 884.989450] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e5968ae9-e3c3-4eb0-8511-fa05b0a919cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 884.998272] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Created folder: Instances in parent group-v290501. [ 884.998494] env[62619]: DEBUG oslo.service.loopingcall [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 884.998680] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 884.998884] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f2e9cce0-2112-4f56-8755-f7b27e377bce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.017442] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 885.017442] env[62619]: value = "task-1365030" [ 885.017442] env[62619]: _type = "Task" [ 885.017442] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.026117] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365030, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.056909] env[62619]: DEBUG nova.compute.manager [req-4da11975-2ac0-4ada-a0fa-8d3974e09f1c req-00a12764-24cb-4bfe-898b-74ee2df943f4 service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Received event network-vif-deleted-364b7b78-e7ea-45f8-986e-ee77f8b3fd35 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.057405] env[62619]: INFO nova.compute.manager [req-4da11975-2ac0-4ada-a0fa-8d3974e09f1c req-00a12764-24cb-4bfe-898b-74ee2df943f4 service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Neutron deleted interface 364b7b78-e7ea-45f8-986e-ee77f8b3fd35; detaching it from the instance and deleting it from the info cache [ 885.057405] env[62619]: DEBUG nova.network.neutron [req-4da11975-2ac0-4ada-a0fa-8d3974e09f1c req-00a12764-24cb-4bfe-898b-74ee2df943f4 service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.156393] env[62619]: DEBUG nova.compute.manager [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Received event network-changed-a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 885.156583] env[62619]: DEBUG nova.compute.manager [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Refreshing instance network info cache due to event network-changed-a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 885.156792] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] Acquiring lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.156937] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] Acquired lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.157130] env[62619]: DEBUG nova.network.neutron [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Refreshing network info cache for port a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 885.252615] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365027, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.259069] env[62619]: DEBUG nova.scheduler.client.report [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 92 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 885.259381] env[62619]: DEBUG nova.compute.provider_tree [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 92 to 93 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 885.259642] env[62619]: DEBUG nova.compute.provider_tree [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 885.275554] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 885.275661] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-03e8adfc-1b57-49f5-ad55-48d05d9889fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.284738] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 885.284738] env[62619]: value = "task-1365031" [ 885.284738] env[62619]: _type = "Task" [ 885.284738] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.294960] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365031, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.297564] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 885.343765] env[62619]: DEBUG oslo_vmware.api [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365026, 'name': PowerOnVM_Task, 'duration_secs': 0.692431} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.344912] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.345998] env[62619]: DEBUG nova.compute.manager [None req-b7055c9a-e149-4ca9-96e6-e226572f3097 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.345998] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8c213b7-66c9-49a7-8eb7-675272df84d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.417722] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.464735] env[62619]: DEBUG nova.network.neutron [-] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 885.529301] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365030, 'name': CreateVM_Task, 'duration_secs': 0.352169} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.529492] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 885.530264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 885.530472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 885.532045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 885.532045] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-68c8f9b2-52c4-4c47-858e-4e6e1cb6975d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.537400] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 885.537400] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d33aa9-7c68-6d1b-94ea-c95f0ddccb73" [ 885.537400] env[62619]: _type = "Task" [ 885.537400] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 885.548898] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d33aa9-7c68-6d1b-94ea-c95f0ddccb73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.559716] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-31517b15-b318-4ee6-8b11-367f54b86237 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.569352] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3200b51d-a716-4951-b821-e183958fbb0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.601946] env[62619]: DEBUG nova.compute.manager [req-4da11975-2ac0-4ada-a0fa-8d3974e09f1c req-00a12764-24cb-4bfe-898b-74ee2df943f4 service nova] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Detach interface failed, port_id=364b7b78-e7ea-45f8-986e-ee77f8b3fd35, reason: Instance f60e0bec-0811-4e91-bc45-b61874846497 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 885.751652] env[62619]: DEBUG oslo_vmware.api [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365027, 'name': PowerOnVM_Task, 'duration_secs': 0.66094} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 885.752155] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 885.752303] env[62619]: INFO nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Took 5.44 seconds to spawn the instance on the hypervisor. [ 885.752476] env[62619]: DEBUG nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 885.753294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-887d7813-03a3-4008-ae43-2a7087beebf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 885.765470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.281s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 885.765952] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 885.768283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 14.847s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 885.769134] env[62619]: DEBUG nova.objects.instance [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'resources' on Instance uuid 9fd66533-39ff-401d-81ef-f37eaceb3103 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 885.794983] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365031, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 885.832262] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 885.968588] env[62619]: INFO nova.compute.manager [-] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Took 1.43 seconds to deallocate network for instance. [ 886.005395] env[62619]: DEBUG nova.network.neutron [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updated VIF entry in instance network info cache for port a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 886.005780] env[62619]: DEBUG nova.network.neutron [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [{"id": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "address": "fa:16:3e:d5:c3:bc", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0389ef1-cf", "ovs_interfaceid": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 886.049969] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d33aa9-7c68-6d1b-94ea-c95f0ddccb73, 'name': SearchDatastore_Task, 'duration_secs': 0.009579} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.050324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.050564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 886.050799] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 886.050972] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 886.051177] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 886.051884] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5d758282-7818-45c1-92e9-8edeede58c86 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.063165] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 886.063513] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 886.064189] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a61d872-7e94-4975-a971-f04112fc86a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.069744] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 886.069744] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52691a4c-3ec6-9022-36c9-12607e2f3686" [ 886.069744] env[62619]: _type = "Task" [ 886.069744] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.077984] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52691a4c-3ec6-9022-36c9-12607e2f3686, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.268773] env[62619]: INFO nova.compute.manager [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Took 24.50 seconds to build instance. [ 886.270819] env[62619]: DEBUG nova.compute.utils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 886.272303] env[62619]: DEBUG nova.objects.instance [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'numa_topology' on Instance uuid 9fd66533-39ff-401d-81ef-f37eaceb3103 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 886.274343] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 886.274508] env[62619]: DEBUG nova.network.neutron [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 886.298719] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365031, 'name': CreateSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.374069] env[62619]: DEBUG nova.policy [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6c9caec5b34a4cd7a2f25d24bf371fde', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ef452b71f2d4d248f5d016b2076508f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 886.476986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 886.508953] env[62619]: DEBUG oslo_concurrency.lockutils [req-8e156427-df7b-401f-8b1c-8b958a5e8410 req-b29e0cbc-64f8-433c-b639-a742014c68df service nova] Releasing lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 886.580915] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52691a4c-3ec6-9022-36c9-12607e2f3686, 'name': SearchDatastore_Task, 'duration_secs': 0.009125} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.581966] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b6ecf841-32c4-4055-87c4-41d27a2a4bbd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.588069] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 886.588069] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526295bc-71cc-d6f9-9674-729126d4f5c7" [ 886.588069] env[62619]: _type = "Task" [ 886.588069] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 886.598677] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526295bc-71cc-d6f9-9674-729126d4f5c7, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 886.770871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74bcb23b-ffb7-4f40-89d0-16566bbea46e tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "da5a8beb-0246-43df-9813-436ddf8598a9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 45.131s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 886.775958] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 886.778748] env[62619]: DEBUG nova.objects.base [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Object Instance<9fd66533-39ff-401d-81ef-f37eaceb3103> lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 886.799440] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365031, 'name': CreateSnapshot_Task, 'duration_secs': 1.195452} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 886.799754] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 886.800794] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84943436-d9aa-48a6-b9f7-f4bf9711c29c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 886.847372] env[62619]: DEBUG nova.network.neutron [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Successfully created port: 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 887.017197] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8b944ac-7f81-4733-bb31-469e78f32303 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.026364] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ae7509f-93b5-47f2-bc42-444ff826a67c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.058471] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27ab88b8-ea93-48c7-9d35-a598ee6bf0d6 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.066079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b637aa3-f634-4632-a9b6-233ccb9e419e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.079925] env[62619]: DEBUG nova.compute.provider_tree [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 887.097185] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526295bc-71cc-d6f9-9674-729126d4f5c7, 'name': SearchDatastore_Task, 'duration_secs': 0.010211} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 887.097450] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 887.097708] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6/11542a9b-6556-4b4b-88fe-26c6be2969f6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 887.097970] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c6ab2427-a6fa-4c9f-9d92-08b3a58781a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.105211] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 887.105211] env[62619]: value = "task-1365032" [ 887.105211] env[62619]: _type = "Task" [ 887.105211] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.113050] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365032, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.321137] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 887.325897] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-942172c9-e889-4ce7-8255-481facf076e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.335874] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 887.335874] env[62619]: value = "task-1365033" [ 887.335874] env[62619]: _type = "Task" [ 887.335874] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 887.345678] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365033, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.583573] env[62619]: DEBUG nova.scheduler.client.report [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 887.616251] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365032, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.791444] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 887.818401] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 887.819229] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 887.819440] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 887.819685] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 887.819917] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 887.820123] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 887.820403] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 887.820624] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 887.820920] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 887.821780] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 887.821780] env[62619]: DEBUG nova.virt.hardware [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 887.822422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74a97ccf-c55c-43a3-b7e2-ec31ea3df528 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.825710] env[62619]: INFO nova.compute.manager [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Rebuilding instance [ 887.833414] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b325eaf-a54d-43c4-bf43-0df8fee52cb1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 887.857406] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365033, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 887.883171] env[62619]: DEBUG nova.compute.manager [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 887.884085] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcfc405c-7b20-419d-a19c-5a2c7056dea0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.089551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.321s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.092278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 15.597s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.094157] env[62619]: INFO nova.compute.claims [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 888.116354] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365032, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.530994} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.116613] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6/11542a9b-6556-4b4b-88fe-26c6be2969f6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 888.116828] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 888.117106] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f6e7e63d-c459-41fb-bc6d-361eb520287c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.124013] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 888.124013] env[62619]: value = "task-1365034" [ 888.124013] env[62619]: _type = "Task" [ 888.124013] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.133109] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365034, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.346979] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365033, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.395902] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.396754] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-41b6253d-ada6-4b7e-be18-e71cff126abc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.406255] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 888.406255] env[62619]: value = "task-1365035" [ 888.406255] env[62619]: _type = "Task" [ 888.406255] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.416259] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365035, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.606041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d6a555de-3504-49f9-a09a-bbcc05a95a68 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 39.214s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.607038] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 16.925s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.607172] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.607511] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.607511] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.609391] env[62619]: INFO nova.compute.manager [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Terminating instance [ 888.611203] env[62619]: DEBUG nova.compute.manager [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 888.611440] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.611713] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3bacb094-c13c-44b3-8623-2ed9bd7e3cd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.622802] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd8fcd81-43ae-4f77-a713-098b9fa2138a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.643719] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365034, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065327} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.643989] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 888.644776] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a3a4a00-8d1e-446d-9f09-895277858869 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.657670] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 9fd66533-39ff-401d-81ef-f37eaceb3103 could not be found. [ 888.657862] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 888.658157] env[62619]: INFO nova.compute.manager [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Took 0.05 seconds to destroy the instance on the hypervisor. [ 888.658340] env[62619]: DEBUG oslo.service.loopingcall [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 888.658958] env[62619]: DEBUG nova.compute.manager [-] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 888.659073] env[62619]: DEBUG nova.network.neutron [-] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 888.678166] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6/11542a9b-6556-4b4b-88fe-26c6be2969f6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 888.678822] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be94889f-ada6-419b-9f8a-b486056fa87a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.699879] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 888.699879] env[62619]: value = "task-1365036" [ 888.699879] env[62619]: _type = "Task" [ 888.699879] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.708735] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365036, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.717344] env[62619]: DEBUG nova.compute.manager [req-95baf33a-b504-4b85-932f-b204ae406255 req-bee6ec42-2d9e-46bc-8f57-d4f3fb2b9e9d service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-vif-plugged-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 888.717650] env[62619]: DEBUG oslo_concurrency.lockutils [req-95baf33a-b504-4b85-932f-b204ae406255 req-bee6ec42-2d9e-46bc-8f57-d4f3fb2b9e9d service nova] Acquiring lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.717906] env[62619]: DEBUG oslo_concurrency.lockutils [req-95baf33a-b504-4b85-932f-b204ae406255 req-bee6ec42-2d9e-46bc-8f57-d4f3fb2b9e9d service nova] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.718178] env[62619]: DEBUG oslo_concurrency.lockutils [req-95baf33a-b504-4b85-932f-b204ae406255 req-bee6ec42-2d9e-46bc-8f57-d4f3fb2b9e9d service nova] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.718471] env[62619]: DEBUG nova.compute.manager [req-95baf33a-b504-4b85-932f-b204ae406255 req-bee6ec42-2d9e-46bc-8f57-d4f3fb2b9e9d service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] No waiting events found dispatching network-vif-plugged-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 888.718741] env[62619]: WARNING nova.compute.manager [req-95baf33a-b504-4b85-932f-b204ae406255 req-bee6ec42-2d9e-46bc-8f57-d4f3fb2b9e9d service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received unexpected event network-vif-plugged-539b4aac-fc64-4dc2-a020-cf6440bd00d8 for instance with vm_state building and task_state spawning. [ 888.832760] env[62619]: DEBUG nova.network.neutron [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Successfully updated port: 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 888.850350] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365033, 'name': CloneVM_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.878702] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.879144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.879430] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 888.880314] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 888.880636] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 888.884736] env[62619]: INFO nova.compute.manager [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Terminating instance [ 888.888781] env[62619]: DEBUG nova.compute.manager [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 888.889084] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.890763] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f3085f7-9358-4fc8-a050-15a8465967b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.901995] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 888.902455] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8ea69b20-4a6e-4d92-b205-9ab0d1e754be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.913727] env[62619]: DEBUG oslo_vmware.api [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 888.913727] env[62619]: value = "task-1365037" [ 888.913727] env[62619]: _type = "Task" [ 888.913727] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.925406] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365035, 'name': PowerOffVM_Task, 'duration_secs': 0.282664} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 888.926186] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 888.926758] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 888.927677] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad939dcb-5dcd-4b3e-a4cc-fdbbaf34fd89 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.937185] env[62619]: DEBUG oslo_vmware.api [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365037, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 888.943886] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 888.945515] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-092e5468-9eb2-446a-84bb-92b556ee48be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.974333] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 888.974333] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 888.974563] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleting the datastore file [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 888.974901] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-4e493570-b5be-476c-b555-9a7b79551f37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 888.981924] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 888.981924] env[62619]: value = "task-1365039" [ 888.981924] env[62619]: _type = "Task" [ 888.981924] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 888.990079] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365039, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.215091] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365036, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.321770] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51354f44-65d1-4595-b9d9-95717648eb3a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.333266] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47db6ea0-83be-4b7f-b380-43576caa363c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.336345] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 889.336537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 889.336732] env[62619]: DEBUG nova.network.neutron [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 889.369705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03d4fd3-906b-4d46-a5e7-92c8685c4bea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.375577] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365033, 'name': CloneVM_Task, 'duration_secs': 2.036504} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.376231] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Created linked-clone VM from snapshot [ 889.376960] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d0f7ea5-5994-4de8-8acc-a5ce70add867 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.383079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65aa22c8-a76f-4dc3-aadb-a2b61a0d6c21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.390489] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Uploading image 4be035b6-4033-4f5c-9c02-c3d1968a1982 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 889.402763] env[62619]: DEBUG nova.compute.provider_tree [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 889.412787] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 889.412787] env[62619]: value = "vm-290505" [ 889.412787] env[62619]: _type = "VirtualMachine" [ 889.412787] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 889.413343] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-9502a02b-aad4-4d3f-8ffc-6a60f69f14ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.423865] env[62619]: DEBUG oslo_vmware.api [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365037, 'name': PowerOffVM_Task, 'duration_secs': 0.259767} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.424994] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 889.425197] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 889.425509] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lease: (returnval){ [ 889.425509] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528bc358-de57-dbac-e273-a73d33f3d5b5" [ 889.425509] env[62619]: _type = "HttpNfcLease" [ 889.425509] env[62619]: } obtained for exporting VM: (result){ [ 889.425509] env[62619]: value = "vm-290505" [ 889.425509] env[62619]: _type = "VirtualMachine" [ 889.425509] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 889.425730] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the lease: (returnval){ [ 889.425730] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528bc358-de57-dbac-e273-a73d33f3d5b5" [ 889.425730] env[62619]: _type = "HttpNfcLease" [ 889.425730] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 889.425899] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-19a24677-0ee4-472b-9ed9-afbe4373ce19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.433609] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 889.433609] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528bc358-de57-dbac-e273-a73d33f3d5b5" [ 889.433609] env[62619]: _type = "HttpNfcLease" [ 889.433609] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 889.448978] env[62619]: DEBUG nova.network.neutron [-] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 889.492049] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365039, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158812} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.492361] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 889.492616] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 889.492800] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 889.501366] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 889.501581] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 889.501760] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleting the datastore file [datastore2] 055a1071-bd4b-4d1b-88c0-7551a07aee9a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 889.502202] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1f57018b-9c30-421f-bc01-bef3a2a363cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.509211] env[62619]: DEBUG oslo_vmware.api [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 889.509211] env[62619]: value = "task-1365042" [ 889.509211] env[62619]: _type = "Task" [ 889.509211] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.520805] env[62619]: DEBUG oslo_vmware.api [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365042, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.711117] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365036, 'name': ReconfigVM_Task, 'duration_secs': 0.541487} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 889.711415] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6/11542a9b-6556-4b4b-88fe-26c6be2969f6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 889.712490] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a68dc9cd-9e51-4a83-b379-d75b48e16539 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.717484] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 889.717484] env[62619]: value = "task-1365043" [ 889.717484] env[62619]: _type = "Task" [ 889.717484] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 889.724918] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365043, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 889.869637] env[62619]: DEBUG nova.network.neutron [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 889.905649] env[62619]: DEBUG nova.scheduler.client.report [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 889.935044] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 889.935044] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528bc358-de57-dbac-e273-a73d33f3d5b5" [ 889.935044] env[62619]: _type = "HttpNfcLease" [ 889.935044] env[62619]: } is ready. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 889.935259] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 889.935259] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528bc358-de57-dbac-e273-a73d33f3d5b5" [ 889.935259] env[62619]: _type = "HttpNfcLease" [ 889.935259] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 889.935872] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d3c11b-bf91-47b0-8e97-4592db058f73 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 889.943351] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Found VMDK URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5209bc43-96e2-7b65-ce8d-52a82aac36cd/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 889.943527] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Opening URL: https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5209bc43-96e2-7b65-ce8d-52a82aac36cd/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 890.002631] env[62619]: INFO nova.compute.manager [-] [instance: 9fd66533-39ff-401d-81ef-f37eaceb3103] Took 1.34 seconds to deallocate network for instance. [ 890.027919] env[62619]: DEBUG oslo_vmware.api [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365042, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146674} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.028304] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 890.028563] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 890.028807] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 890.029071] env[62619]: INFO nova.compute.manager [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Took 1.14 seconds to destroy the instance on the hypervisor. [ 890.029423] env[62619]: DEBUG oslo.service.loopingcall [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.029688] env[62619]: DEBUG nova.compute.manager [-] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 890.029812] env[62619]: DEBUG nova.network.neutron [-] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 890.059326] env[62619]: DEBUG nova.network.neutron [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.176430] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-94bfc213-e20b-4bbd-a159-3e4546b21040 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.227750] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365043, 'name': Rename_Task, 'duration_secs': 0.133976} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 890.228182] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 890.228481] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ff34a6ec-ea77-4fb2-867c-521d01e6b649 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.234463] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 890.234463] env[62619]: value = "task-1365044" [ 890.234463] env[62619]: _type = "Task" [ 890.234463] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.244545] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365044, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.410496] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.318s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 890.411173] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 890.414792] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 7.839s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 890.541705] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 890.541989] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 890.542183] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 890.542431] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 890.542634] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 890.542838] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 890.543287] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 890.543529] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b 
tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 890.543756] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 890.543969] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 890.544215] env[62619]: DEBUG nova.virt.hardware [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 890.545212] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796bcb79-20a4-445d-9516-d20d5f81a1c3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.556723] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e22cdc-1bf5-477a-9359-466a43d78d6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.573697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 890.574118] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Instance network_info: |[{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, 
"delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 890.575181] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance VIF info [] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.581673] env[62619]: DEBUG oslo.service.loopingcall [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.582411] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:b5:5d', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '40c947c4-f471-4d48-8e43-fee54198107e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '539b4aac-fc64-4dc2-a020-cf6440bd00d8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 890.591797] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Creating folder: Project (4ef452b71f2d4d248f5d016b2076508f). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.592181] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.592459] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b5f5919e-92c6-426e-8128-c3457493d695 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.595050] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5f730373-9fcc-4a40-ac19-f0b72e87a99b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.615322] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.615322] env[62619]: value = "task-1365046" [ 890.615322] env[62619]: _type = "Task" [ 890.615322] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.621444] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Created folder: Project (4ef452b71f2d4d248f5d016b2076508f) in parent group-v290436. [ 890.622089] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Creating folder: Instances. Parent ref: group-v290506. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 890.623284] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-31a7dd9a-8c71-48d0-a895-d479a6779082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.632970] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365046, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.643758] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Created folder: Instances in parent group-v290506. [ 890.644475] env[62619]: DEBUG oslo.service.loopingcall [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 890.644786] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 890.645184] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a906bc29-0f43-49e3-a910-cd52ee9e7390 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 890.677582] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 890.677582] env[62619]: value = "task-1365048" [ 890.677582] env[62619]: _type = "Task" [ 890.677582] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 890.691522] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365048, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.745744] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365044, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 890.775176] env[62619]: DEBUG nova.network.neutron [-] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 890.781698] env[62619]: DEBUG nova.compute.manager [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 890.782197] env[62619]: DEBUG nova.compute.manager [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing instance network info cache due to event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 890.782624] env[62619]: DEBUG oslo_concurrency.lockutils [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 890.783370] env[62619]: DEBUG oslo_concurrency.lockutils [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 890.783370] env[62619]: DEBUG nova.network.neutron [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 890.916381] env[62619]: DEBUG nova.compute.utils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 890.918622] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 890.918979] env[62619]: DEBUG nova.network.neutron [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 890.993063] env[62619]: DEBUG nova.policy [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3041343376d4f2fad14577d5c412b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4270942193cd4a9aa397784368b9ae64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 891.054996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26b25721-a7e3-4c12-bb13-6af6712994c6 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "9fd66533-39ff-401d-81ef-f37eaceb3103" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 2.448s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 891.125884] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365046, 'name': CreateVM_Task, 'duration_secs': 0.391471} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.126089] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.126881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.126881] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.127079] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 891.127285] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e0393e0f-08a0-442a-a761-b2e2ee54ac7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.133447] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 891.133447] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52867c68-cd29-58db-19c6-0e134afd6cc6" [ 891.133447] env[62619]: _type = "Task" [ 891.133447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.142023] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52867c68-cd29-58db-19c6-0e134afd6cc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.193045] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365048, 'name': CreateVM_Task, 'duration_secs': 0.387383} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.193373] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 891.194314] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.244013] env[62619]: DEBUG oslo_vmware.api [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365044, 'name': PowerOnVM_Task, 'duration_secs': 0.554579} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.244444] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 891.244626] env[62619]: INFO nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Took 8.61 seconds to spawn the instance on the hypervisor. [ 891.244901] env[62619]: DEBUG nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 891.246313] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf6a5375-cd2e-4d1e-9cd7-1c796a5feb9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.288748] env[62619]: INFO nova.compute.manager [-] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Took 1.26 seconds to deallocate network for instance. [ 891.334675] env[62619]: DEBUG nova.network.neutron [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Successfully created port: d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 891.429772] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 891.471241] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 actively managed on this compute host and has allocations in placement: {'resources': {'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.471443] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 646b4ae6-09e1-4b3c-b17d-392e746df454 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.471567] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance ca452ef6-d777-46dd-a313-ae7dd441adca actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.471694] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 59b960b6-aa41-4409-a899-9829388c3ff2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.471821] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 055a1071-bd4b-4d1b-88c0-7551a07aee9a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.471985] env[62619]: WARNING nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance f60e0bec-0811-4e91-bc45-b61874846497 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 891.472116] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 1c1b7717-30a9-40c9-913f-6d65a619b94a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.472226] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.472337] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance fa4e9947-5b99-4447-9535-6dbcaba635f8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.472440] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance da5a8beb-0246-43df-9813-436ddf8598a9 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.472547] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 11542a9b-6556-4b4b-88fe-26c6be2969f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.472654] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 33998dc6-3be4-4b78-af12-0ad7bfab70c6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.472759] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 8c07697f-0e20-4ec5-88ec-ec4420906313 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 891.514233] env[62619]: DEBUG nova.network.neutron [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updated VIF entry in instance network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 891.514653] env[62619]: DEBUG nova.network.neutron [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 891.644119] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52867c68-cd29-58db-19c6-0e134afd6cc6, 'name': SearchDatastore_Task, 'duration_secs': 0.020244} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 891.644407] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 891.644662] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 891.644912] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 891.645072] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.645262] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 891.645556] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 891.645870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 891.646222] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b6a1b642-3da4-4bcd-a504-8692c21e27b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.648232] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-869a6e67-002a-4f85-8f39-f421e46705e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.659231] env[62619]: DEBUG oslo_vmware.api [None 
req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 891.659231] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ed8106-a46e-7a6a-c404-92a015942635" [ 891.659231] env[62619]: _type = "Task" [ 891.659231] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.659728] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 891.660478] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 891.662342] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5143fbbb-c7f6-4a9b-955f-dbcf1e8aca0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 891.678894] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ed8106-a46e-7a6a-c404-92a015942635, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.679266] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 891.679266] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5271392d-50c2-6299-8b2f-0c1de91bf73f" [ 891.679266] env[62619]: _type = "Task" [ 891.679266] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 891.688088] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5271392d-50c2-6299-8b2f-0c1de91bf73f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 891.768155] env[62619]: INFO nova.compute.manager [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Took 29.73 seconds to build instance. 
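The SearchDatastore_Task, CopyVirtualDisk_Task and PowerOnVM_Task entries above all follow the same oslo.vmware pattern: a vSphere *_Task method is invoked through the API session, which returns a task reference immediately, and wait_for_task() then polls it (the "_poll_task ... progress is N%" lines) until it completes or fails. A minimal sketch of that library pattern, with placeholder host, credentials and VM reference (vm_ref is assumed to have been looked up elsewhere); this is an illustration of the pattern, not Nova's own code:

    # Sketch of the oslo.vmware invoke-and-wait pattern seen in the log.
    # Host, credentials and vm_ref are placeholders/assumptions.
    from oslo_vmware import api as vmware_api

    session = vmware_api.VMwareAPISession(
        'vc1.example.test',        # vCenter host (placeholder)
        'user@vsphere.local',      # username (placeholder)
        'secret',                  # password (placeholder)
        api_retry_count=10,
        task_poll_interval=0.5,
    )

    # invoke_api() issues the SOAP call; *_Task methods return a task
    # reference immediately instead of blocking until the work is done.
    vm_ref = ...  # managed object reference, obtained elsewhere (assumed)
    task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)

    # wait_for_task() polls the task, logging progress as in the entries
    # above, and returns the task info on success or raises on error.
    task_info = session.wait_for_task(task)
    print(task_info.state)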
[ 891.800504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 891.976348] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 9f7d7830-b878-41b9-a236-f7cd5580cf1d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.017677] env[62619]: DEBUG oslo_concurrency.lockutils [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.017958] env[62619]: DEBUG nova.compute.manager [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Received event network-vif-deleted-6b6b714f-2980-4b3f-b83a-26862818d2fb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 892.018275] env[62619]: INFO nova.compute.manager [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Neutron deleted interface 6b6b714f-2980-4b3f-b83a-26862818d2fb; detaching it from the instance and deleting it from the info cache [ 892.018464] env[62619]: DEBUG nova.network.neutron [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 892.173901] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ed8106-a46e-7a6a-c404-92a015942635, 'name': SearchDatastore_Task, 'duration_secs': 0.02019} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.174251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.174502] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 892.174730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 892.188858] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5271392d-50c2-6299-8b2f-0c1de91bf73f, 'name': SearchDatastore_Task, 'duration_secs': 0.031832} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.189691] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8f7a9d8b-e5c6-47a8-9793-900b0e02afe1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.195515] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 892.195515] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5259493e-cf31-75a0-c161-391aa2910842" [ 892.195515] env[62619]: _type = "Task" [ 892.195515] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.205529] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5259493e-cf31-75a0-c161-391aa2910842, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.271426] env[62619]: DEBUG oslo_concurrency.lockutils [None req-37e98dde-195e-4947-845c-0d8ce4ff0c14 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 33.167s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 892.450346] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 892.472951] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 892.473391] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 892.473585] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 892.473811] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 892.474131] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 892.474400] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Chose sockets=0, cores=0, threads=0; limits were 
sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 892.474722] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 892.474910] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 892.475118] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 892.475411] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 892.475684] env[62619]: DEBUG nova.virt.hardware [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 892.476950] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ed398b-d533-40c6-888d-2d2f5908c15b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.481284] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 85e279da-e067-46f8-929b-87a013c4e7f4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 892.481645] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 12 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 892.481866] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2880MB phys_disk=200GB used_disk=11GB total_vcpus=48 used_vcpus=12 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 892.491933] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-020a1721-0bfa-4320-92b1-35d87db6f8e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.522022] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ef8aed87-565b-4c3e-80f8-6a635a31f6ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.533946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415b051b-5a90-4979-9b4d-2d7a272a78fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.570768] env[62619]: DEBUG nova.compute.manager [req-98b61e6f-e6bf-45f1-b0cc-70cc155079ab req-bba1b1c5-0315-43b9-aa57-75fbb917f825 service nova] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Detach interface failed, port_id=6b6b714f-2980-4b3f-b83a-26862818d2fb, reason: Instance 055a1071-bd4b-4d1b-88c0-7551a07aee9a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 892.707025] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5259493e-cf31-75a0-c161-391aa2910842, 'name': SearchDatastore_Task, 'duration_secs': 0.012119} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.709617] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.709894] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.711119] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 892.711119] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 892.711119] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b7af103f-72ea-446d-b762-c6b436049395 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.712772] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39c90797-e7fc-4822-b285-0f3546dfd974 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.722404] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f33639a-74ce-4e8d-937a-a219974df6e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.726529] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 892.726720] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 892.727520] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 892.727520] env[62619]: value = "task-1365049" [ 892.727520] env[62619]: _type = "Task" [ 892.727520] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.728070] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc3be67e-371a-4e22-b9ea-515944a82ce9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.736554] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60c1d27-2aab-4cde-bca7-03089e5ab389 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.744052] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365049, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.744404] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 892.744404] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b08f9-d2ec-d4f7-37ae-28fdd694d946" [ 892.744404] env[62619]: _type = "Task" [ 892.744404] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.775626] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70c00ac5-3ded-48df-bf0a-24aec6f1e0b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.782609] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b08f9-d2ec-d4f7-37ae-28fdd694d946, 'name': SearchDatastore_Task, 'duration_secs': 0.013135} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.783809] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-23cd4568-1322-4bcb-8968-a264be2b749c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.789889] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ce1c108-0f6f-4ea5-9acc-7366a2e5643f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.795274] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 892.795274] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526d52ba-7067-1403-9ce8-759a912e2ec0" [ 892.795274] env[62619]: _type = "Task" [ 892.795274] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.806820] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 892.814279] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526d52ba-7067-1403-9ce8-759a912e2ec0, 'name': SearchDatastore_Task, 'duration_secs': 0.013827} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 892.814556] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 892.814847] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/33998dc6-3be4-4b78-af12-0ad7bfab70c6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 892.815199] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c0461f51-b78c-403f-921e-416a0d2e73ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 892.824782] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 892.824782] env[62619]: value = "task-1365050" [ 892.824782] env[62619]: _type = "Task" [ 892.824782] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 892.835526] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365050, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 892.855315] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 892.855553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.028305] env[62619]: DEBUG nova.compute.manager [req-21b6e370-4c58-447e-90a4-6af711bcfe62 req-b3989fd0-276d-42c2-bc13-c234b3dfad90 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-vif-plugged-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 893.028305] env[62619]: DEBUG oslo_concurrency.lockutils [req-21b6e370-4c58-447e-90a4-6af711bcfe62 req-b3989fd0-276d-42c2-bc13-c234b3dfad90 service nova] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 893.029110] env[62619]: DEBUG oslo_concurrency.lockutils [req-21b6e370-4c58-447e-90a4-6af711bcfe62 req-b3989fd0-276d-42c2-bc13-c234b3dfad90 service nova] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 893.029533] env[62619]: DEBUG oslo_concurrency.lockutils [req-21b6e370-4c58-447e-90a4-6af711bcfe62 req-b3989fd0-276d-42c2-bc13-c234b3dfad90 service nova] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 893.029921] env[62619]: DEBUG nova.compute.manager [req-21b6e370-4c58-447e-90a4-6af711bcfe62 req-b3989fd0-276d-42c2-bc13-c234b3dfad90 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] No waiting events found dispatching network-vif-plugged-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 893.030381] env[62619]: WARNING nova.compute.manager [req-21b6e370-4c58-447e-90a4-6af711bcfe62 req-b3989fd0-276d-42c2-bc13-c234b3dfad90 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received unexpected event network-vif-plugged-d70279ce-58c0-45c4-9a74-8f1f74552d21 for instance with vm_state building and task_state spawning. 
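The "Acquiring lock ... by ...", "acquired ... :: waited Ns" and ""released" ... :: held Ns" entries throughout this section come from oslo.concurrency's lockutils wrappers (the lock context manager and the synchronized decorator's inner function), which time how long each caller waited for and then held a named lock. A minimal sketch of the two usual forms, with placeholder lock names and bodies rather than Nova's actual code:

    # Sketch of the oslo.concurrency locking pattern behind the
    # Acquiring/acquired/released lock lines. Lock names and the function
    # body are placeholders, not Nova code.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage():
        # Only one caller per lock name runs this at a time; the wrapper
        # logs acquisition, wait time and hold time at DEBUG level.
        pass

    # The same lock machinery can also be used imperatively as a
    # context manager around an ad-hoc critical section.
    with lockutils.lock('refresh_cache-<instance-uuid>'):
        pass

    update_usage()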
[ 893.120655] env[62619]: DEBUG nova.network.neutron [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Successfully updated port: d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 893.241342] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365049, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.337061] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365050, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.340296] env[62619]: ERROR nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [req-5a6f258d-e927-4603-a89a-499ca990b25e] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-5a6f258d-e927-4603-a89a-499ca990b25e"}]} [ 893.354648] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 893.357737] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 893.368879] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 893.369112] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 893.381178] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 893.401204] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 893.622822] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 893.623051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 893.623261] env[62619]: DEBUG nova.network.neutron [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 893.658180] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3be41e0-1f1e-4dac-a8a1-e2aa70edaadb {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.666965] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00316e60-0a17-4fc9-bd89-68d6eecbb0ca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.699459] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63a64c9b-e246-42c0-880c-8e77054dbbc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.709035] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e834e2ab-1f34-4615-badf-412ca770fbdf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.724019] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 893.725666] env[62619]: DEBUG nova.compute.manager [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 893.741843] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365049, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.676402} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.742206] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.742431] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.742705] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e88b672c-8b59-4c9e-adce-69a3035b733d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.751058] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 893.751058] env[62619]: value = "task-1365051" [ 893.751058] env[62619]: _type = "Task" [ 893.751058] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.762262] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365051, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.836703] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365050, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.848425} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 893.836985] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/33998dc6-3be4-4b78-af12-0ad7bfab70c6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 893.837228] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 893.837489] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-eb0abe67-1f08-4ff6-a3c8-93079018ff85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 893.845230] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 893.845230] env[62619]: value = "task-1365052" [ 893.845230] env[62619]: _type = "Task" [ 893.845230] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 893.853547] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365052, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 893.879129] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.155817] env[62619]: DEBUG nova.network.neutron [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 894.233599] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 894.251357] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 894.264940] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365051, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.072968} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.265102] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.265844] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c38b3b7-01d9-444d-8aa7-5a449c6b67fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.287551] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Reconfiguring VM instance instance-0000004d to attach disk [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.287901] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6d91cd8-db14-4b87-8758-f3bf865d3925 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.308290] env[62619]: DEBUG nova.network.neutron [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 894.317781] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 894.317781] env[62619]: value = "task-1365053" [ 894.317781] env[62619]: _type = "Task" [ 894.317781] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.332145] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365053, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.356565] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365052, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.075964} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.356565] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 894.357107] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c70a0c0-4bed-4616-999e-95c6842cbdd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.381179] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/33998dc6-3be4-4b78-af12-0ad7bfab70c6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 894.381560] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-113d5aaa-24db-4732-83bf-2013bb4937b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.405264] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 894.405264] env[62619]: value = "task-1365054" [ 894.405264] env[62619]: _type = "Task" [ 894.405264] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.413836] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365054, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.738667] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 894.738944] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 4.324s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 894.739299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 9.326s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 894.741036] env[62619]: INFO nova.compute.claims [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 894.811242] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 894.811642] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Instance network_info: |[{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 894.812162] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 
tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:7b:59:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ba866c99-1cb2-4588-9f76-4bc0421ed46a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'd70279ce-58c0-45c4-9a74-8f1f74552d21', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 894.820290] env[62619]: DEBUG oslo.service.loopingcall [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 894.820627] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 894.823984] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-162f3e54-de2f-43b0-9c49-208d032d032f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.846238] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365053, 'name': ReconfigVM_Task, 'duration_secs': 0.393671} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.847587] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Reconfigured VM instance instance-0000004d to attach disk [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9/da5a8beb-0246-43df-9813-436ddf8598a9.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.848189] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 894.848189] env[62619]: value = "task-1365055" [ 894.848189] env[62619]: _type = "Task" [ 894.848189] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.848384] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-225d1cd3-72cb-425b-a2f2-d2a5c5c84ae0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.859479] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365055, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.860986] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 894.860986] env[62619]: value = "task-1365056" [ 894.860986] env[62619]: _type = "Task" [ 894.860986] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.870096] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365056, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 894.915127] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365054, 'name': ReconfigVM_Task, 'duration_secs': 0.397036} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 894.915465] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/33998dc6-3be4-4b78-af12-0ad7bfab70c6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 894.916138] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-08d7de52-c39c-40b1-b49d-11fbea66d016 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 894.925483] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 894.925483] env[62619]: value = "task-1365057" [ 894.925483] env[62619]: _type = "Task" [ 894.925483] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 894.935599] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365057, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.084601] env[62619]: DEBUG nova.compute.manager [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 895.084773] env[62619]: DEBUG nova.compute.manager [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing instance network info cache due to event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 895.085246] env[62619]: DEBUG oslo_concurrency.lockutils [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.085540] env[62619]: DEBUG oslo_concurrency.lockutils [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.085752] env[62619]: DEBUG nova.network.neutron [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 895.359497] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365055, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.371262] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365056, 'name': Rename_Task, 'duration_secs': 0.257131} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.371610] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.371904] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-268f3ad2-a8ac-48c8-8f6b-caf848a91623 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.380438] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 895.380438] env[62619]: value = "task-1365058" [ 895.380438] env[62619]: _type = "Task" [ 895.380438] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.390494] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365058, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.436966] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365057, 'name': Rename_Task, 'duration_secs': 0.27202} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.437301] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 895.437603] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4e7233b7-318a-40e0-9d88-d7277d72c2b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.446196] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 895.446196] env[62619]: value = "task-1365059" [ 895.446196] env[62619]: _type = "Task" [ 895.446196] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.459217] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365059, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.822373] env[62619]: DEBUG nova.network.neutron [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updated VIF entry in instance network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 895.822798] env[62619]: DEBUG nova.network.neutron [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 895.867402] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365055, 'name': CreateVM_Task, 'duration_secs': 0.531586} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.867582] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 895.868478] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.868669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.868997] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 895.869780] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3656e12d-9aa4-4f54-afc8-dbe5d8d8dcb9 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.880640] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 895.880640] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5280845f-c9b7-bd9d-66d0-e568e32e51bc" [ 895.880640] env[62619]: _type = "Task" [ 895.880640] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.900571] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365058, 'name': PowerOnVM_Task} progress is 87%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.900850] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5280845f-c9b7-bd9d-66d0-e568e32e51bc, 'name': SearchDatastore_Task, 'duration_secs': 0.015298} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 895.901220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 895.901497] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 895.902280] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 895.902477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 895.902688] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 895.902979] env[62619]: DEBUG 
oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2e06a8ee-653b-4bc3-bb71-44150530677c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.914450] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 895.914650] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 895.915435] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-70edc94a-0d3b-4078-94f6-e8c2177c6c9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 895.921820] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 895.921820] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528d4b9c-e99e-f399-d3cb-1fe04dcf2cca" [ 895.921820] env[62619]: _type = "Task" [ 895.921820] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 895.934113] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528d4b9c-e99e-f399-d3cb-1fe04dcf2cca, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 895.957142] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365059, 'name': PowerOnVM_Task} progress is 1%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.008688] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-188432d9-d4e2-44b5-bbb6-4a924d113ed4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.017193] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eff72f21-91b9-4701-8167-050d236cadbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.049730] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-355fa639-dad0-47c4-99aa-59860a0ced4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.058388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9433aa2c-ca3e-4b34-ade9-5fc3f981e72c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.073773] env[62619]: DEBUG nova.compute.provider_tree [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 896.326480] env[62619]: DEBUG oslo_concurrency.lockutils [req-26e6690c-08e8-47d6-a5f0-c403a388de5a req-f2fb134d-94e4-413a-a2fa-ec80b39ecbf1 service nova] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.391672] env[62619]: DEBUG oslo_vmware.api [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365058, 'name': PowerOnVM_Task, 'duration_secs': 0.911312} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.392053] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 896.392202] env[62619]: DEBUG nova.compute.manager [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 896.393053] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01411cf4-9b2a-41fd-bd58-fc20a312aca5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.433690] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528d4b9c-e99e-f399-d3cb-1fe04dcf2cca, 'name': SearchDatastore_Task, 'duration_secs': 0.020437} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.435131] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c445bcdb-6dfd-4739-b097-8dd4cabc9ba9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.441556] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 896.441556] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52444e55-7d61-5aa9-34ac-7ddf507f89a6" [ 896.441556] env[62619]: _type = "Task" [ 896.441556] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.450064] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52444e55-7d61-5aa9-34ac-7ddf507f89a6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.458474] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365059, 'name': PowerOnVM_Task} progress is 64%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.578043] env[62619]: DEBUG nova.scheduler.client.report [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 896.911037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 896.954018] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52444e55-7d61-5aa9-34ac-7ddf507f89a6, 'name': SearchDatastore_Task, 'duration_secs': 0.017254} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 896.957231] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 896.958026] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 8c07697f-0e20-4ec5-88ec-ec4420906313/8c07697f-0e20-4ec5-88ec-ec4420906313.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 896.958026] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-32c736eb-f432-4d07-8d81-3507789cbb93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 896.966379] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365059, 'name': PowerOnVM_Task} progress is 82%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 896.967840] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 896.967840] env[62619]: value = "task-1365060" [ 896.967840] env[62619]: _type = "Task" [ 896.967840] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 896.977841] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365060, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.082496] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.083222] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 897.086614] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 11.255s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.088337] env[62619]: INFO nova.compute.claims [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 897.465854] env[62619]: DEBUG oslo_vmware.api [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365059, 'name': PowerOnVM_Task, 'duration_secs': 1.637905} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.466385] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 897.466440] env[62619]: INFO nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Took 9.67 seconds to spawn the instance on the hypervisor. [ 897.466575] env[62619]: DEBUG nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 897.467468] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d923a5af-1e51-499c-a142-cc1967c287fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.487727] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365060, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 897.593793] env[62619]: DEBUG nova.compute.utils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 897.597237] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 897.597469] env[62619]: DEBUG nova.network.neutron [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 897.649372] env[62619]: DEBUG nova.policy [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 897.981234] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365060, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.6131} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 897.981716] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 8c07697f-0e20-4ec5-88ec-ec4420906313/8c07697f-0e20-4ec5-88ec-ec4420906313.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 897.981716] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 897.982056] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-5553833b-19e9-47c3-8693-e40a6b4a82b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 897.984271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "da5a8beb-0246-43df-9813-436ddf8598a9" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.984484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "da5a8beb-0246-43df-9813-436ddf8598a9" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.984600] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "da5a8beb-0246-43df-9813-436ddf8598a9-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 897.984788] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "da5a8beb-0246-43df-9813-436ddf8598a9-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 897.984957] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "da5a8beb-0246-43df-9813-436ddf8598a9-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 897.990267] env[62619]: INFO nova.compute.manager [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Terminating instance [ 897.997021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "refresh_cache-da5a8beb-0246-43df-9813-436ddf8598a9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 897.997021] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "refresh_cache-da5a8beb-0246-43df-9813-436ddf8598a9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 897.997021] env[62619]: DEBUG nova.network.neutron [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 897.998334] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 897.998334] env[62619]: value = "task-1365061" [ 897.998334] env[62619]: _type = "Task" [ 897.998334] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.011693] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365061, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.013957] env[62619]: INFO nova.compute.manager [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Took 29.21 seconds to build instance. [ 898.098184] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 898.345913] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a431cd0-5c0e-44a7-ad8e-ca16c3f66899 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.354087] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e4f1344-f994-4c5a-b669-40c4a970eb12 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.386211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8319bcb6-e92e-4e5c-903a-0f9eb291f006 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.396973] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35d8e75f-9fa4-4861-be56-c5b9feef91b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.404437] env[62619]: DEBUG nova.network.neutron [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Successfully created port: b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 898.420029] env[62619]: DEBUG nova.compute.provider_tree [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 898.510965] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365061, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.070225} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 898.512580] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 898.512580] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2861c3aa-d366-4f3f-91b6-82ab5ce2d94b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.516405] env[62619]: DEBUG oslo_concurrency.lockutils [None req-43613d6b-3e2b-47b6-8853-156c395339a9 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 35.529s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 898.535170] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Reconfiguring VM instance instance-00000050 to attach disk [datastore2] 8c07697f-0e20-4ec5-88ec-ec4420906313/8c07697f-0e20-4ec5-88ec-ec4420906313.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 898.536279] env[62619]: DEBUG nova.network.neutron [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 898.538049] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6dd4c57-d46e-4d41-a702-27454ba262b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 898.561339] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 898.561339] env[62619]: value = "task-1365062" [ 898.561339] env[62619]: _type = "Task" [ 898.561339] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 898.575918] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365062, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 898.613984] env[62619]: DEBUG nova.network.neutron [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 898.652077] env[62619]: INFO nova.compute.manager [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Rescuing [ 898.652945] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 898.652945] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 898.653107] env[62619]: DEBUG nova.network.neutron [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 898.924399] env[62619]: DEBUG nova.scheduler.client.report [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 899.036963] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting lease state for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5209bc43-96e2-7b65-ce8d-52a82aac36cd/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 899.037449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e7e919-a742-40f9-8596-2ee3cafe545e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.044877] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5209bc43-96e2-7b65-ce8d-52a82aac36cd/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 899.045069] env[62619]: ERROR oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Aborting lease for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5209bc43-96e2-7b65-ce8d-52a82aac36cd/disk-0.vmdk due to incomplete transfer. [ 899.045337] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-8c48f337-7063-4cbc-962d-c0daa14fa68a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.055817] env[62619]: DEBUG oslo_vmware.rw_handles [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Closed VMDK read handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap/nfc/5209bc43-96e2-7b65-ce8d-52a82aac36cd/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 899.055880] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Uploaded image 4be035b6-4033-4f5c-9c02-c3d1968a1982 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 899.058264] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 899.058606] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-2535908a-8705-41b9-a3eb-4703d260ef27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.067029] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 899.067029] env[62619]: value = "task-1365063" [ 899.067029] env[62619]: _type = "Task" [ 899.067029] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.073722] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365062, 'name': ReconfigVM_Task, 'duration_secs': 0.459487} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.074437] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Reconfigured VM instance instance-00000050 to attach disk [datastore2] 8c07697f-0e20-4ec5-88ec-ec4420906313/8c07697f-0e20-4ec5-88ec-ec4420906313.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 899.075088] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-eac705ba-1604-44fd-8d23-00b8f022d2e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.081968] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365063, 'name': Destroy_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.083500] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 899.083500] env[62619]: value = "task-1365064" [ 899.083500] env[62619]: _type = "Task" [ 899.083500] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.092760] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365064, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.110555] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 899.117332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock "refresh_cache-da5a8beb-0246-43df-9813-436ddf8598a9" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.117332] env[62619]: DEBUG nova.compute.manager [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 899.117332] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 899.118344] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b0cb276-25f2-4d85-a372-44c49fe20baf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.126576] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 899.128907] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d87e1383-42f1-4ead-889f-cf14622bef36 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.137176] env[62619]: DEBUG oslo_vmware.api [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 899.137176] env[62619]: value = "task-1365065" [ 899.137176] env[62619]: _type = "Task" [ 899.137176] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.141962] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 899.141962] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 899.142164] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 899.142237] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 
tempest-ServersTestJSON-203516608-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 899.142481] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 899.142686] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 899.142945] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 899.143326] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 899.143471] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 899.143665] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 899.143847] env[62619]: DEBUG nova.virt.hardware [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 899.144810] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071e0f67-87b6-4c1e-9cfb-3996abda8587 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.163098] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b0da560-8d1e-4776-a841-b95918140989 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.170656] env[62619]: DEBUG oslo_vmware.api [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365065, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.385846] env[62619]: DEBUG nova.network.neutron [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 899.429989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.343s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.430675] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 899.434113] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 12.957s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.434398] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.436375] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.636s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.436980] env[62619]: DEBUG nova.objects.instance [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lazy-loading 'resources' on Instance uuid 055a1071-bd4b-4d1b-88c0-7551a07aee9a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 899.466598] env[62619]: INFO nova.scheduler.client.report [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Deleted allocations for instance f60e0bec-0811-4e91-bc45-b61874846497 [ 899.578142] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365063, 'name': Destroy_Task, 'duration_secs': 0.422896} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.578438] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Destroyed the VM [ 899.578675] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 899.578930] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-50bbe207-2a6f-451d-b254-ca0ca9071914 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.588383] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 899.588383] env[62619]: value = "task-1365066" [ 899.588383] env[62619]: _type = "Task" [ 899.588383] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.595635] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365064, 'name': Rename_Task, 'duration_secs': 0.214857} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.596337] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 899.596598] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-3f45036f-a1c0-4a7f-8a31-3600b8a3ec4c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.601245] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365066, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.604453] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 899.604453] env[62619]: value = "task-1365067" [ 899.604453] env[62619]: _type = "Task" [ 899.604453] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.620968] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365067, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.654070] env[62619]: DEBUG oslo_vmware.api [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365065, 'name': PowerOffVM_Task, 'duration_secs': 0.172979} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 899.654384] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 899.654560] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 899.654835] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-451472ed-157c-4424-b538-d74baeec0a7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.689734] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 899.690052] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 899.690363] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleting the datastore file [datastore2] da5a8beb-0246-43df-9813-436ddf8598a9 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 899.691892] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6723c0f4-7fb9-46e8-85f3-4a3d4bb372a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.705847] env[62619]: DEBUG oslo_vmware.api [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 899.705847] env[62619]: value = "task-1365069" [ 899.705847] env[62619]: _type = "Task" [ 899.705847] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 899.722031] env[62619]: DEBUG oslo_vmware.api [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365069, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 899.722031] env[62619]: INFO nova.compute.manager [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Rebuilding instance [ 899.775811] env[62619]: DEBUG nova.compute.manager [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 899.776769] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26616f7b-b93a-4eb4-9115-4499c75e28e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 899.892687] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 899.925904] env[62619]: DEBUG nova.compute.manager [req-8ce469cb-04b4-4d46-a101-86c82e19ffb4 req-361fb13e-c4b3-43d6-b9e8-0e88e9483708 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Received event network-vif-plugged-b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 899.926214] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce469cb-04b4-4d46-a101-86c82e19ffb4 req-361fb13e-c4b3-43d6-b9e8-0e88e9483708 service nova] Acquiring lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 899.926473] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce469cb-04b4-4d46-a101-86c82e19ffb4 req-361fb13e-c4b3-43d6-b9e8-0e88e9483708 service nova] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 899.926654] env[62619]: DEBUG oslo_concurrency.lockutils [req-8ce469cb-04b4-4d46-a101-86c82e19ffb4 req-361fb13e-c4b3-43d6-b9e8-0e88e9483708 service nova] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 899.926850] env[62619]: DEBUG nova.compute.manager [req-8ce469cb-04b4-4d46-a101-86c82e19ffb4 req-361fb13e-c4b3-43d6-b9e8-0e88e9483708 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] No waiting events found dispatching network-vif-plugged-b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 899.927317] env[62619]: WARNING nova.compute.manager [req-8ce469cb-04b4-4d46-a101-86c82e19ffb4 req-361fb13e-c4b3-43d6-b9e8-0e88e9483708 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Received unexpected event 
network-vif-plugged-b2d0f2bc-db7c-485c-8126-98219c38a4fd for instance with vm_state building and task_state spawning. [ 899.942488] env[62619]: DEBUG nova.compute.utils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 899.945292] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 899.947538] env[62619]: DEBUG nova.network.neutron [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 899.976730] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bf693b05-133a-4714-ada4-d045d5e72cad tempest-InstanceActionsTestJSON-1048141677 tempest-InstanceActionsTestJSON-1048141677-project-member] Lock "f60e0bec-0811-4e91-bc45-b61874846497" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 16.565s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 900.006150] env[62619]: DEBUG nova.policy [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5034ec8e3d3f4cc79e13528e3bf31167', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '578df9b6434d416fbae5f3cf2c33ef1b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 900.103667] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365066, 'name': RemoveSnapshot_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.113543] env[62619]: DEBUG nova.network.neutron [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Successfully updated port: b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 900.120336] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365067, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.213577] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a32604d5-19f6-485e-b929-61bbba417322 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.224351] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88c3398b-ab1f-495c-9e47-8bc7e46d03d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.229371] env[62619]: DEBUG oslo_vmware.api [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365069, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.173438} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.229681] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 900.229886] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 900.230085] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 900.230416] env[62619]: INFO nova.compute.manager [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Took 1.11 seconds to destroy the instance on the hypervisor. [ 900.230692] env[62619]: DEBUG oslo.service.loopingcall [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 900.231456] env[62619]: DEBUG nova.compute.manager [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 900.231555] env[62619]: DEBUG nova.network.neutron [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 900.263429] env[62619]: DEBUG nova.network.neutron [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 900.266202] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b9d623e-a541-4099-bc25-21cf510adf6f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.276359] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-458c1d66-f2b4-4e98-8ddb-f24f187635c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.295133] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.295672] env[62619]: DEBUG nova.compute.provider_tree [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 900.296961] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4070d093-6d6e-4a10-bc69-63c7db5c4372 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.305875] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 900.305875] env[62619]: value = "task-1365070" [ 900.305875] env[62619]: _type = "Task" [ 900.305875] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.321027] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365070, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.430394] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.430883] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d07fb0a2-8500-4d62-95af-404bd7c91743 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.439141] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 900.439141] env[62619]: value = "task-1365071" [ 900.439141] env[62619]: _type = "Task" [ 900.439141] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.453695] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 900.461025] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365071, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.461025] env[62619]: DEBUG nova.network.neutron [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Successfully created port: b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 900.600281] env[62619]: DEBUG oslo_vmware.api [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365066, 'name': RemoveSnapshot_Task, 'duration_secs': 0.697016} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.600642] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 900.600883] env[62619]: INFO nova.compute.manager [None req-be75e77d-73c2-45a0-9d3f-bd224ad4e581 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Took 15.86 seconds to snapshot the instance on the hypervisor. [ 900.617302] env[62619]: DEBUG oslo_vmware.api [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365067, 'name': PowerOnVM_Task, 'duration_secs': 0.78001} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.617302] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 900.617302] env[62619]: INFO nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Took 8.17 seconds to spawn the instance on the hypervisor. 
[ 900.617302] env[62619]: DEBUG nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 900.617302] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6989ba9a-c78f-4b47-a019-cd0f77d0bff9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.622366] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 900.622537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 900.622658] env[62619]: DEBUG nova.network.neutron [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 900.772266] env[62619]: DEBUG nova.network.neutron [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 900.801012] env[62619]: DEBUG nova.scheduler.client.report [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 900.818492] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365070, 'name': PowerOffVM_Task, 'duration_secs': 0.221642} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.819408] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.820135] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 900.820675] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-180117c6-0c52-4f30-9a05-999b982fc656 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.829708] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 900.829708] env[62619]: value = "task-1365072" [ 900.829708] env[62619]: _type = "Task" [ 900.829708] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.849646] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 900.849646] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 900.849646] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290463', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'name': 'volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c30e0db3-9b63-44b7-9b7f-810defc530d1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'serial': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 900.849646] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49631459-b484-4c7f-ab1a-324b012d59d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.870916] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6dbe740-c381-449a-b98d-7ec10d5d9211 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.882941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-982d981c-32d7-4ef0-8672-2db4874c67e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.906895] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad0fa075-081f-41c3-8afa-e74a3c5e4082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.924558] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] The volume has not been displaced from its original location: [datastore1] volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc/volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc.vmdk. No consolidation needed. {{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 900.930177] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Reconfiguring VM instance instance-00000043 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 900.930572] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-af072c09-6e9a-4a4e-9aff-5a0e07531d89 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.956078] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365071, 'name': PowerOffVM_Task, 'duration_secs': 0.472932} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 900.957642] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 900.958018] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 900.958018] env[62619]: value = "task-1365073" [ 900.958018] env[62619]: _type = "Task" [ 900.958018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 900.958726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-637ea342-d9b8-4cc1-bf7d-cbe3fe65ea55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.974463] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365073, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 900.989591] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58b54a54-6653-44db-806b-2174d4700520 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 900.997904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "ef1e80cf-2ea2-4764-851a-8aa97563a278" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 900.998082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.033199] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 901.033520] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4c8f7eaa-79fa-4089-808e-531cdbe488c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.041426] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 
tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 901.041426] env[62619]: value = "task-1365074" [ 901.041426] env[62619]: _type = "Task" [ 901.041426] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.053123] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365074, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.139642] env[62619]: INFO nova.compute.manager [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Took 28.66 seconds to build instance. [ 901.162591] env[62619]: DEBUG nova.network.neutron [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 901.272425] env[62619]: INFO nova.compute.manager [-] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Took 1.04 seconds to deallocate network for instance. [ 901.300460] env[62619]: DEBUG nova.network.neutron [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updating instance_info_cache with network_info: [{"id": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "address": "fa:16:3e:e7:7e:54", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d0f2bc-db", "ovs_interfaceid": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 901.307116] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.871s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.310631] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.431s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 901.311453] env[62619]: INFO nova.compute.claims [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 901.336561] env[62619]: INFO nova.scheduler.client.report [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleted allocations for instance 055a1071-bd4b-4d1b-88c0-7551a07aee9a [ 901.472294] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 901.474316] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365073, 'name': ReconfigVM_Task, 'duration_secs': 0.179762} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 901.474735] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Reconfigured VM instance instance-00000043 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 901.479323] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-caffa19a-673b-4b96-86f4-b0d2965d04e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.500622] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 901.505842] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 901.505842] env[62619]: value = "task-1365075" [ 901.505842] env[62619]: _type = "Task" [ 901.505842] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.508262] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 901.508470] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 901.508651] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 901.508858] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 901.509040] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 901.509213] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 901.509453] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 901.509663] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 901.511058] env[62619]: DEBUG 
nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 901.511895] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 901.512124] env[62619]: DEBUG nova.virt.hardware [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 901.515726] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74926a54-2649-4548-bd6b-7aacf9df44b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.528330] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc389d7f-8002-4c19-91c8-f028c547440b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.533359] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365075, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.554496] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 901.554712] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 901.554963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 901.555144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 901.555957] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 901.556261] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9f3af7bd-797b-4f43-996b-b5c38189f702 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.566638] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 901.566980] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 901.567988] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-69cc003b-2e14-4d0f-b4c6-1082f421629b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.576275] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 901.576275] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526088d7-af11-678b-0728-a3df68fdd776" [ 901.576275] env[62619]: _type = "Task" [ 901.576275] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.586513] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526088d7-af11-678b-0728-a3df68fdd776, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 901.642259] env[62619]: DEBUG oslo_concurrency.lockutils [None req-640209d1-a41b-451b-a288-892c80f0870a tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 31.874s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.781885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 901.803064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 901.803417] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Instance network_info: |[{"id": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "address": "fa:16:3e:e7:7e:54", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": 
false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d0f2bc-db", "ovs_interfaceid": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 901.804145] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e7:7e:54', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2d0f2bc-db7c-485c-8126-98219c38a4fd', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 901.814020] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating folder: Project (c61b5e689e5544e6857baf8d3c52fe0b). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.815230] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-465f6da4-e0f2-4f8e-b326-12c8d1865560 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.834202] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created folder: Project (c61b5e689e5544e6857baf8d3c52fe0b) in parent group-v290436. [ 901.834422] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating folder: Instances. Parent ref: group-v290511. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 901.835896] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-715b0641-368b-48db-a908-01bf7f9eacb2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.846230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cea7c664-658f-4fc8-ad2c-9e71711a9eb0 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "055a1071-bd4b-4d1b-88c0-7551a07aee9a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.966s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 901.849800] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created folder: Instances in parent group-v290511. 
[ 901.849800] env[62619]: DEBUG oslo.service.loopingcall [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 901.849800] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 901.849800] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bd53dbf8-2cd9-4d32-9492-4dd624014b79 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 901.876484] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 901.876484] env[62619]: value = "task-1365078" [ 901.876484] env[62619]: _type = "Task" [ 901.876484] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 901.885933] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365078, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.027346] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365075, 'name': ReconfigVM_Task, 'duration_secs': 0.219018} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.027346] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290463', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'name': 'volume-fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'c30e0db3-9b63-44b7-9b7f-810defc530d1', 'attached_at': '', 'detached_at': '', 'volume_id': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc', 'serial': 'fb506c39-d341-4d52-9b16-5d2ed093a6cc'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 902.027346] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.027346] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85683a3c-5f99-4d33-a316-61c8fc625a0a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.035964] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Unregistering the VM {{(pid=62619) 
_destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.036560] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-326929e4-1781-4d59-8514-80da29a6947f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.039647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.087086] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526088d7-af11-678b-0728-a3df68fdd776, 'name': SearchDatastore_Task, 'duration_secs': 0.013106} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.087995] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7ed91ccb-9dc9-4923-afbd-04d193f791be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.095685] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 902.095685] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ae621a-c29d-d8af-f344-3616024b82f7" [ 902.095685] env[62619]: _type = "Task" [ 902.095685] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.106534] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ae621a-c29d-d8af-f344-3616024b82f7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.111291] env[62619]: DEBUG nova.compute.manager [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Received event network-changed-b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.111291] env[62619]: DEBUG nova.compute.manager [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Refreshing instance network info cache due to event network-changed-b2d0f2bc-db7c-485c-8126-98219c38a4fd. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 902.113971] env[62619]: DEBUG oslo_concurrency.lockutils [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] Acquiring lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.114485] env[62619]: DEBUG oslo_concurrency.lockutils [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] Acquired lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.114947] env[62619]: DEBUG nova.network.neutron [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Refreshing network info cache for port b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 902.118015] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.118317] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.118565] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Deleting the datastore file [datastore1] c30e0db3-9b63-44b7-9b7f-810defc530d1 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.119158] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-59778021-a8d9-4093-a660-66d4c5f9e6ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.129689] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for the task: (returnval){ [ 902.129689] env[62619]: value = "task-1365080" [ 902.129689] env[62619]: _type = "Task" [ 902.129689] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.143730] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365080, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.281484] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "59b960b6-aa41-4409-a899-9829388c3ff2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.281769] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "59b960b6-aa41-4409-a899-9829388c3ff2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.282015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "59b960b6-aa41-4409-a899-9829388c3ff2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.282222] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "59b960b6-aa41-4409-a899-9829388c3ff2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.282398] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "59b960b6-aa41-4409-a899-9829388c3ff2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.284933] env[62619]: INFO nova.compute.manager [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Terminating instance [ 902.287211] env[62619]: DEBUG nova.compute.manager [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 902.287430] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 902.288262] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2f0b79-82d8-4322-89f7-b4d7763229c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.297038] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 902.297561] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73d913c6-aaff-416a-a279-1ba478030c1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.304756] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 902.304756] env[62619]: value = "task-1365081" [ 902.304756] env[62619]: _type = "Task" [ 902.304756] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.316903] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365081, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.398136] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365078, 'name': CreateVM_Task, 'duration_secs': 0.369541} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.398618] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 902.400681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.401068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.401768] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 902.404989] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-31b0f8de-58f1-4b96-aea2-b685408ffa17 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.412499] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 902.412499] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52162230-0590-ab61-a49a-1aca68131b19" [ 902.412499] env[62619]: _type = "Task" [ 902.412499] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.426554] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52162230-0590-ab61-a49a-1aca68131b19, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.499105] env[62619]: DEBUG nova.compute.manager [req-be1f3385-fac9-47fc-b921-d7d1c38dadbf req-2228ce3c-1168-4458-92f2-3947ba31b571 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Received event network-vif-plugged-b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.499891] env[62619]: DEBUG oslo_concurrency.lockutils [req-be1f3385-fac9-47fc-b921-d7d1c38dadbf req-2228ce3c-1168-4458-92f2-3947ba31b571 service nova] Acquiring lock "85e279da-e067-46f8-929b-87a013c4e7f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 902.500326] env[62619]: DEBUG oslo_concurrency.lockutils [req-be1f3385-fac9-47fc-b921-d7d1c38dadbf req-2228ce3c-1168-4458-92f2-3947ba31b571 service nova] Lock "85e279da-e067-46f8-929b-87a013c4e7f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 902.500684] env[62619]: DEBUG oslo_concurrency.lockutils [req-be1f3385-fac9-47fc-b921-d7d1c38dadbf req-2228ce3c-1168-4458-92f2-3947ba31b571 service nova] Lock "85e279da-e067-46f8-929b-87a013c4e7f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 902.501031] env[62619]: DEBUG nova.compute.manager [req-be1f3385-fac9-47fc-b921-d7d1c38dadbf req-2228ce3c-1168-4458-92f2-3947ba31b571 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] No waiting events found dispatching network-vif-plugged-b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 902.501389] env[62619]: WARNING nova.compute.manager [req-be1f3385-fac9-47fc-b921-d7d1c38dadbf req-2228ce3c-1168-4458-92f2-3947ba31b571 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Received unexpected event network-vif-plugged-b6291b55-9d26-4a33-8cef-87166b1a2c83 for instance with vm_state building and task_state spawning. [ 902.602515] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c86be0d2-e8cb-425e-9881-f879a5fb0f0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.615407] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0741403-8b66-4830-bfda-0f30d15f8d0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.618162] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ae621a-c29d-d8af-f344-3616024b82f7, 'name': SearchDatastore_Task, 'duration_secs': 0.014213} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.618464] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.618724] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. {{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 902.619396] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-09a5fe01-7719-47cd-a79a-a6ccdbfa3865 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.652970] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478634ca-4198-4a27-ad8f-791e545f6684 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.658967] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 902.658967] env[62619]: value = "task-1365082" [ 902.658967] env[62619]: _type = "Task" [ 902.658967] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.668030] env[62619]: DEBUG oslo_vmware.api [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Task: {'id': task-1365080, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.174987} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.669756] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cec61a3-0bad-452e-9e17-4fedbbbbed34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.675041] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 902.675269] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 902.675449] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 902.680226] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365082, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.693763] env[62619]: DEBUG nova.compute.provider_tree [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 902.745802] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 902.746380] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1d155636-ca76-4657-99e3-8b1535e94ed0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.759713] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bf6cae7-dcac-4c5c-b87e-2a44492c6743 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.795498] env[62619]: ERROR nova.compute.manager [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Failed to detach volume fb506c39-d341-4d52-9b16-5d2ed093a6cc from /dev/sda: nova.exception.InstanceNotFound: Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 could not be found. 
[ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Traceback (most recent call last): [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self.driver.rebuild(**kwargs) [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] raise NotImplementedError() [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] NotImplementedError [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] During handling of the above exception, another exception occurred: [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Traceback (most recent call last): [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self.driver.detach_volume(context, old_connection_info, [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] return self._volumeops.detach_volume(connection_info, instance) [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._detach_volume_vmdk(connection_info, instance) [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] stable_ref.fetch_moref(session) [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] raise exception.InstanceNotFound(instance_id=self._uuid) [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] nova.exception.InstanceNotFound: 
Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 could not be found. [ 902.795498] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.816394] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365081, 'name': PowerOffVM_Task, 'duration_secs': 0.212229} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.816667] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 902.816841] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 902.818088] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df372b91-cea5-46f5-a3a5-bd620ceecf07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.882603] env[62619]: DEBUG nova.compute.manager [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 902.882819] env[62619]: DEBUG nova.compute.manager [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing instance network info cache due to event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 902.883123] env[62619]: DEBUG oslo_concurrency.lockutils [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.883305] env[62619]: DEBUG oslo_concurrency.lockutils [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.883513] env[62619]: DEBUG nova.network.neutron [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 902.889099] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 902.889892] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 902.890395] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleting the datastore file [datastore2] 59b960b6-aa41-4409-a899-9829388c3ff2 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 902.890486] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-31004d6f-dcda-46a1-9519-82a3168d319a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.902801] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 902.902801] env[62619]: value = "task-1365084" [ 902.902801] env[62619]: _type = "Task" [ 902.902801] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.913389] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365084, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.924776] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52162230-0590-ab61-a49a-1aca68131b19, 'name': SearchDatastore_Task, 'duration_secs': 0.01122} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 902.927737] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 902.929089] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 902.929089] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 902.929089] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 902.929089] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 902.929586] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0a8323d9-0c24-4007-933b-b39c40673d3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.940948] env[62619]: DEBUG nova.network.neutron [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updated VIF entry in instance network info cache for port b2d0f2bc-db7c-485c-8126-98219c38a4fd. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 902.940948] env[62619]: DEBUG nova.network.neutron [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updating instance_info_cache with network_info: [{"id": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "address": "fa:16:3e:e7:7e:54", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d0f2bc-db", "ovs_interfaceid": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 902.960254] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 902.960254] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 902.960254] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e8e4259-f3be-4696-8703-39380fe3275a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.971024] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 902.971024] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52339dae-9a3d-f240-4587-c7982b25d26f" [ 902.971024] env[62619]: _type = "Task" [ 902.971024] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.982803] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52339dae-9a3d-f240-4587-c7982b25d26f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 902.984663] env[62619]: DEBUG nova.compute.utils [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Build of instance c30e0db3-9b63-44b7-9b7f-810defc530d1 aborted: Failed to rebuild volume backed instance. {{(pid=62619) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 902.988715] env[62619]: ERROR nova.compute.manager [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Setting instance vm_state to ERROR: nova.exception.BuildAbortException: Build of instance c30e0db3-9b63-44b7-9b7f-810defc530d1 aborted: Failed to rebuild volume backed instance. [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Traceback (most recent call last): [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 4142, in _do_rebuild_instance [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self.driver.rebuild(**kwargs) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/driver.py", line 493, in rebuild [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] raise NotImplementedError() [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] NotImplementedError [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] During handling of the above exception, another exception occurred: [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Traceback (most recent call last): [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3600, in _rebuild_volume_backed_instance [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._detach_root_volume(context, instance, root_bdm) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3579, in _detach_root_volume [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] with excutils.save_and_reraise_exception(): [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self.force_reraise() [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", 
line 200, in force_reraise [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] raise self.value [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3565, in _detach_root_volume [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self.driver.detach_volume(context, old_connection_info, [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 553, in detach_volume [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] return self._volumeops.detach_volume(connection_info, instance) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 649, in detach_volume [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._detach_volume_vmdk(connection_info, instance) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/volumeops.py", line 569, in _detach_volume_vmdk [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] vm_ref = vm_util.get_vm_ref(self._session, instance) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1145, in get_vm_ref [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] stable_ref.fetch_moref(session) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1136, in fetch_moref [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] raise exception.InstanceNotFound(instance_id=self._uuid) [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] nova.exception.InstanceNotFound: Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 could not be found. 
[ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] During handling of the above exception, another exception occurred: [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Traceback (most recent call last): [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 10865, in _error_out_instance_on_exception [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] yield [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3868, in rebuild_instance [ 902.988715] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._do_rebuild_instance_with_claim( [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3954, in _do_rebuild_instance_with_claim [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._do_rebuild_instance( [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 4146, in _do_rebuild_instance [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._rebuild_default_impl(**kwargs) [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3723, in _rebuild_default_impl [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] self._rebuild_volume_backed_instance( [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] File "/opt/stack/nova/nova/compute/manager.py", line 3615, in _rebuild_volume_backed_instance [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] raise exception.BuildAbortException( [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] nova.exception.BuildAbortException: Build of instance c30e0db3-9b63-44b7-9b7f-810defc530d1 aborted: Failed to rebuild volume backed instance. [ 902.992102] env[62619]: ERROR nova.compute.manager [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] [ 903.113062] env[62619]: DEBUG nova.network.neutron [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Successfully updated port: b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 903.175599] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365082, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.199745] env[62619]: DEBUG nova.scheduler.client.report [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 903.415239] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365084, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.444168] env[62619]: DEBUG oslo_concurrency.lockutils [req-4578e63e-c772-4ae6-8eb0-2890e766f7ef req-d68b3f4c-3d58-4170-a9f2-ca8eff85b36f service nova] Releasing lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 903.483063] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52339dae-9a3d-f240-4587-c7982b25d26f, 'name': SearchDatastore_Task, 'duration_secs': 0.059583} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.483873] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-299d1878-ee64-4834-990e-026ec2b7961b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.491488] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 903.491488] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527b8e6f-69b4-0c66-bfc2-0aec2d865e36" [ 903.491488] env[62619]: _type = "Task" [ 903.491488] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.503042] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527b8e6f-69b4-0c66-bfc2-0aec2d865e36, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.615349] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "refresh_cache-85e279da-e067-46f8-929b-87a013c4e7f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 903.615517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "refresh_cache-85e279da-e067-46f8-929b-87a013c4e7f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 903.615635] env[62619]: DEBUG nova.network.neutron [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 903.621110] env[62619]: DEBUG nova.network.neutron [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updated VIF entry in instance network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 903.621569] env[62619]: DEBUG nova.network.neutron [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 903.671907] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365082, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.596016} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.672233] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. [ 903.672971] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee47e9d9-4ed7-4067-bdda-626ab195c6d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.698520] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Reconfiguring VM instance instance-0000004f to attach disk [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 903.699145] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-068c33d8-356c-4b22-98dc-30edc0839a7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.712694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.403s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 903.713210] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 903.715992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 9.465s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 903.724776] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 903.724776] env[62619]: value = "task-1365085" [ 903.724776] env[62619]: _type = "Task" [ 903.724776] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.733026] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365085, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.914226] env[62619]: DEBUG oslo_vmware.api [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365084, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.537372} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.914498] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.914677] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 903.914878] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 903.915067] env[62619]: INFO nova.compute.manager [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Took 1.63 seconds to destroy the instance on the hypervisor. [ 903.915326] env[62619]: DEBUG oslo.service.loopingcall [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 903.915541] env[62619]: DEBUG nova.compute.manager [-] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 903.915630] env[62619]: DEBUG nova.network.neutron [-] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.006085] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527b8e6f-69b4-0c66-bfc2-0aec2d865e36, 'name': SearchDatastore_Task, 'duration_secs': 0.036221} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 904.006383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.006654] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 9f7d7830-b878-41b9-a236-f7cd5580cf1d/9f7d7830-b878-41b9-a236-f7cd5580cf1d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 904.006921] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2f3701c6-7fb2-44af-b0e2-72aabe3dd355 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.019179] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 904.019179] env[62619]: value = "task-1365086" [ 904.019179] env[62619]: _type = "Task" [ 904.019179] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 904.030969] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365086, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.124194] env[62619]: DEBUG oslo_concurrency.lockutils [req-f910ea5d-f2be-453c-ab51-694d29d756ac req-604ad3ee-0b35-4e47-b20c-ba01cda40d94 service nova] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 904.213628] env[62619]: DEBUG nova.network.neutron [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 904.222056] env[62619]: DEBUG nova.compute.utils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 904.231675] env[62619]: INFO nova.compute.claims [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 904.235316] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 904.235549] env[62619]: DEBUG nova.network.neutron [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 904.252350] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365085, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.283124] env[62619]: DEBUG nova.policy [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ce526a1d824fe8b6573fa80adcd53f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33242a5e0a764cf3b8af687fc4302e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 904.530867] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365086, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.568722] env[62619]: DEBUG nova.network.neutron [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Successfully created port: 9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 904.594560] env[62619]: DEBUG nova.network.neutron [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Updating instance_info_cache with network_info: [{"id": "b6291b55-9d26-4a33-8cef-87166b1a2c83", "address": "fa:16:3e:bb:37:8f", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6291b55-9d", "ovs_interfaceid": "b6291b55-9d26-4a33-8cef-87166b1a2c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.683170] env[62619]: DEBUG nova.network.neutron [-] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.737204] env[62619]: INFO nova.compute.resource_tracker [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating resource usage from migration ec366ade-42a0-4f38-a1b8-bddeb5641557 [ 904.747602] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 904.770814] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365085, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 904.920339] env[62619]: DEBUG nova.compute.manager [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.920642] env[62619]: DEBUG nova.compute.manager [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing instance network info cache due to event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.920882] env[62619]: DEBUG oslo_concurrency.lockutils [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 904.920959] env[62619]: DEBUG oslo_concurrency.lockutils [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 904.921781] env[62619]: DEBUG nova.network.neutron [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 904.931056] env[62619]: DEBUG nova.compute.manager [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 904.931056] env[62619]: DEBUG nova.compute.manager [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing instance network info cache due to event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 904.931056] env[62619]: DEBUG oslo_concurrency.lockutils [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 905.006843] env[62619]: DEBUG oslo_concurrency.lockutils [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.014019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-faee2dd3-b09f-44f1-b438-f7685da96389 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.020790] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339f86cd-5586-452f-af3f-a04f62421646 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.063672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7964752d-504f-4987-b968-02d007b05ba7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.066373] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365086, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.016493} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.067175] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 9f7d7830-b878-41b9-a236-f7cd5580cf1d/9f7d7830-b878-41b9-a236-f7cd5580cf1d.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 905.067402] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 905.068120] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8bf80c43-aa81-44c7-b9d2-09800038005f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.073749] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d22d2bc-f882-4f11-9826-dde834b99e26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.079950] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 905.079950] env[62619]: value = "task-1365087" [ 905.079950] env[62619]: _type = "Task" [ 905.079950] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.089047] env[62619]: DEBUG nova.compute.provider_tree [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.100908] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "refresh_cache-85e279da-e067-46f8-929b-87a013c4e7f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 905.100908] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Instance network_info: |[{"id": "b6291b55-9d26-4a33-8cef-87166b1a2c83", "address": "fa:16:3e:bb:37:8f", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6291b55-9d", "ovs_interfaceid": "b6291b55-9d26-4a33-8cef-87166b1a2c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 905.100908] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365087, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.101393] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:37:8f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b6291b55-9d26-4a33-8cef-87166b1a2c83', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 905.108590] env[62619]: DEBUG oslo.service.loopingcall [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 905.109188] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 905.109356] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-60d3ea40-6a55-4243-a9d6-5e70e087261b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.130634] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 905.130634] env[62619]: value = "task-1365088" [ 905.130634] env[62619]: _type = "Task" [ 905.130634] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.139465] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365088, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.186919] env[62619]: INFO nova.compute.manager [-] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Took 1.27 seconds to deallocate network for instance. [ 905.272249] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365085, 'name': ReconfigVM_Task, 'duration_secs': 1.353898} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.272592] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Reconfigured VM instance instance-0000004f to attach disk [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 905.273442] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8863fe55-0907-4425-98d2-23580d7e7d6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.298930] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9bb6d5ab-069b-458a-90e7-d5f9a172c7df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.315464] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 905.315464] env[62619]: value = "task-1365089" [ 905.315464] env[62619]: _type = "Task" [ 905.315464] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.326734] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365089, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.595639] env[62619]: DEBUG nova.scheduler.client.report [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 905.600898] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365087, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.257056} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.603686] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 905.604605] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-860aea8f-b4c9-4013-a831-d0bce562211f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.632019] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Reconfiguring VM instance instance-00000051 to attach disk [datastore2] 9f7d7830-b878-41b9-a236-f7cd5580cf1d/9f7d7830-b878-41b9-a236-f7cd5580cf1d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 905.632019] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ee41e3d6-d9ee-4adb-8ac5-6d6fb298d480 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.653914] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365088, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.655600] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 905.655600] env[62619]: value = "task-1365090" [ 905.655600] env[62619]: _type = "Task" [ 905.655600] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.666546] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365090, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 905.693610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 905.720190] env[62619]: DEBUG nova.network.neutron [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updated VIF entry in instance network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 905.720698] env[62619]: DEBUG nova.network.neutron [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 905.769226] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 905.801465] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 905.801769] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 905.801979] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 905.802743] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 905.802743] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 905.802743] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 905.802743] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 905.803191] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 905.803191] env[62619]: DEBUG nova.virt.hardware [None 
req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 905.803843] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 905.803843] env[62619]: DEBUG nova.virt.hardware [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 905.804566] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4763feaf-7560-4d67-80e3-9c13c6f97839 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.814127] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e2bfdd6-8ddf-45ca-8f0b-6a4248dc7a66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.836665] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365089, 'name': ReconfigVM_Task, 'duration_secs': 0.490932} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 905.836976] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 905.837254] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-095c764f-00a8-4e65-bca3-9ef37b8e44b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.845470] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 905.845470] env[62619]: value = "task-1365091" [ 905.845470] env[62619]: _type = "Task" [ 905.845470] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 905.855112] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365091, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.008134] env[62619]: DEBUG nova.compute.manager [req-6310f2d7-0209-4de2-b05d-89cd7856255e req-a055af86-597c-488d-b251-ec41c0e48349 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Received event network-vif-plugged-9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.008596] env[62619]: DEBUG oslo_concurrency.lockutils [req-6310f2d7-0209-4de2-b05d-89cd7856255e req-a055af86-597c-488d-b251-ec41c0e48349 service nova] Acquiring lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.008692] env[62619]: DEBUG oslo_concurrency.lockutils [req-6310f2d7-0209-4de2-b05d-89cd7856255e req-a055af86-597c-488d-b251-ec41c0e48349 service nova] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.008822] env[62619]: DEBUG oslo_concurrency.lockutils [req-6310f2d7-0209-4de2-b05d-89cd7856255e req-a055af86-597c-488d-b251-ec41c0e48349 service nova] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.008994] env[62619]: DEBUG nova.compute.manager [req-6310f2d7-0209-4de2-b05d-89cd7856255e req-a055af86-597c-488d-b251-ec41c0e48349 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] No waiting events found dispatching network-vif-plugged-9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 906.009392] env[62619]: WARNING nova.compute.manager [req-6310f2d7-0209-4de2-b05d-89cd7856255e req-a055af86-597c-488d-b251-ec41c0e48349 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Received unexpected event network-vif-plugged-9183d7c6-bf8f-4386-b4b0-aa8ed284959a for instance with vm_state building and task_state spawning. 
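The nova.virt.hardware entries above walk through CPU-topology selection for the m1.nano flavor: with 1 vCPU and no flavor or image limits, the only sockets/cores/threads factorisation is 1:1:1, which is why a single VirtCPUTopology(cores=1,sockets=1,threads=1) is reported. Below is a minimal illustrative sketch of that factorisation step; it is not Nova's implementation, and the VirtCPUTopology namedtuple and possible_topologies helper are stand-ins introduced only for this example.

    # Illustrative only: enumerate sockets/cores/threads factorisations of a
    # vCPU count that fit within the per-dimension maxima logged above
    # (65536 each when no flavor/image limits are set).
    from collections import namedtuple
    from itertools import product

    VirtCPUTopology = namedtuple("VirtCPUTopology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        topologies = []
        for sockets, cores, threads in product(
                range(1, min(vcpus, max_sockets) + 1),
                range(1, min(vcpus, max_cores) + 1),
                range(1, min(vcpus, max_threads) + 1)):
            if sockets * cores * threads == vcpus:
                topologies.append(VirtCPUTopology(sockets, cores, threads))
        return topologies

    # For vcpus=1 (m1.nano) the only candidate is 1:1:1, matching
    # "Got 1 possible topologies" above.
    print(possible_topologies(1))

Larger vCPU counts would simply yield every factorisation within the stated maxima, which Nova then sorts by its own preference rules before picking one.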
[ 906.102522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.387s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.102845] env[62619]: INFO nova.compute.manager [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Migrating [ 906.103064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute-rpcapi-router" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.103223] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "compute-rpcapi-router" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.104619] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: waited 9.194s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.104819] env[62619]: DEBUG nova.objects.instance [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Trying to apply a migration context that does not seem to be set for this instance {{(pid=62619) apply_migration_context /opt/stack/nova/nova/objects/instance.py:1067}} [ 906.159190] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365088, 'name': CreateVM_Task, 'duration_secs': 0.756915} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.163409] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 906.165023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.165947] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.165947] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 906.166599] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-28cb9850-81c8-437e-ac92-b8de25b73a13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.173899] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365090, 'name': ReconfigVM_Task, 'duration_secs': 0.447015} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.175128] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Reconfigured VM instance instance-00000051 to attach disk [datastore2] 9f7d7830-b878-41b9-a236-f7cd5580cf1d/9f7d7830-b878-41b9-a236-f7cd5580cf1d.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 906.175828] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 906.175828] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525ba569-2a7e-e99e-92d2-1aea62654f06" [ 906.175828] env[62619]: _type = "Task" [ 906.175828] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.176032] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8ec30ca9-db93-41e1-9097-01edbc1d24ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.187071] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525ba569-2a7e-e99e-92d2-1aea62654f06, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.188779] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 906.188779] env[62619]: value = "task-1365092" [ 906.188779] env[62619]: _type = "Task" [ 906.188779] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.197484] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365092, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.223665] env[62619]: DEBUG oslo_concurrency.lockutils [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.223991] env[62619]: DEBUG nova.compute.manager [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Received event network-changed-b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 906.224185] env[62619]: DEBUG nova.compute.manager [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Refreshing instance network info cache due to event network-changed-b6291b55-9d26-4a33-8cef-87166b1a2c83. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 906.224437] env[62619]: DEBUG oslo_concurrency.lockutils [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] Acquiring lock "refresh_cache-85e279da-e067-46f8-929b-87a013c4e7f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.224592] env[62619]: DEBUG oslo_concurrency.lockutils [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] Acquired lock "refresh_cache-85e279da-e067-46f8-929b-87a013c4e7f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.224848] env[62619]: DEBUG nova.network.neutron [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Refreshing network info cache for port b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 906.226140] env[62619]: DEBUG oslo_concurrency.lockutils [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.226335] env[62619]: DEBUG nova.network.neutron [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 906.355739] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365091, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.536462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "c30e0db3-9b63-44b7-9b7f-810defc530d1" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.536462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.536462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "c30e0db3-9b63-44b7-9b7f-810defc530d1-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.536462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.536462] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.538861] env[62619]: INFO nova.compute.manager [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Terminating instance [ 906.540729] env[62619]: DEBUG nova.compute.manager [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 906.541074] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9595eb6b-a9a4-4141-b335-625a69b37de4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.551828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c2f199d-1441-4d41-a262-6f97474d7b32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.583032] env[62619]: WARNING nova.virt.vmwareapi.driver [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance does not exists. Proceeding to delete instance properties on datastore: nova.exception.InstanceNotFound: Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 could not be found. [ 906.583269] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 906.584354] env[62619]: DEBUG nova.network.neutron [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Successfully updated port: 9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.585566] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-22edc9d7-5d49-4ac6-8ae0-f57d10d3a1fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.595718] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7584ed66-1602-4751-8f5b-b69477abbd06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.608493] env[62619]: INFO nova.compute.rpcapi [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Automatically selected compute RPC version 6.3 from minimum service version 67 [ 906.609043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "compute-rpcapi-router" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.632374] env[62619]: WARNING nova.virt.vmwareapi.vmops [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance c30e0db3-9b63-44b7-9b7f-810defc530d1 could not be found. 
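The "Acquiring lock", "acquired by" and '"released" by' DEBUG entries that appear throughout this section come from oslo.concurrency's lockutils. A minimal sketch of the two usage forms is below, assuming oslo.concurrency is installed; the function body and the specific lock names are illustrative only, not Nova code.

    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def claim_resources():
        # Runs while the in-process lock named 'compute_resources' is held.
        pass

    # Context-manager form, as used for the refresh_cache-<uuid> locks above;
    # the UUID here is just copied from one of the log entries.
    with lockutils.lock('refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171'):
        pass

    claim_resources()

In these entries the decorator form produces the 'acquired by "..."' / '"released" by "..."' lines (lockutils.py:402/407/421), while the context-manager form produces the plain Acquiring/Acquired/Releasing lock lines (lockutils.py:310/313/331).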
[ 906.632587] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 906.632766] env[62619]: INFO nova.compute.manager [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Took 0.09 seconds to destroy the instance on the hypervisor. [ 906.633029] env[62619]: DEBUG oslo.service.loopingcall [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 906.638037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d01dbe0b-6399-4ee4-86a4-fed1b5d9d41b tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.finish_evacuation" :: held 0.533s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 906.638812] env[62619]: DEBUG nova.compute.manager [-] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 906.638934] env[62619]: DEBUG nova.network.neutron [-] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 906.641276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.860s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.641522] env[62619]: DEBUG nova.objects.instance [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lazy-loading 'resources' on Instance uuid da5a8beb-0246-43df-9813-436ddf8598a9 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.656024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "interface-646b4ae6-09e1-4b3c-b17d-392e746df454-9413dc32-2f0c-4650-952a-63bed028a099" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 906.656024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-646b4ae6-09e1-4b3c-b17d-392e746df454-9413dc32-2f0c-4650-952a-63bed028a099" acquired by 
"nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 906.656024] env[62619]: DEBUG nova.objects.instance [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lazy-loading 'flavor' on Instance uuid 646b4ae6-09e1-4b3c-b17d-392e746df454 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 906.689151] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525ba569-2a7e-e99e-92d2-1aea62654f06, 'name': SearchDatastore_Task, 'duration_secs': 0.013779} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.689579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 906.689847] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 906.690126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 906.690285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 906.690464] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 906.693558] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b3d102ec-4d11-4b14-b6fb-f44e054e10ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.701503] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365092, 'name': Rename_Task, 
'duration_secs': 0.152943} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.701744] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 906.701917] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b7d3927d-7ee9-4a5a-8b0d-6b1ed0724bb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.706772] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 906.706949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 906.707883] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cbf8ab3d-936c-45f7-a34b-618f8a75213a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.713474] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 906.713474] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fd72c6-3291-4c18-cf51-b067c1be51c0" [ 906.713474] env[62619]: _type = "Task" [ 906.713474] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.717131] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 906.717131] env[62619]: value = "task-1365093" [ 906.717131] env[62619]: _type = "Task" [ 906.717131] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 906.726050] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fd72c6-3291-4c18-cf51-b067c1be51c0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.732919] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365093, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 906.856334] env[62619]: DEBUG oslo_vmware.api [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365091, 'name': PowerOnVM_Task, 'duration_secs': 0.763531} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 906.856623] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 906.859550] env[62619]: DEBUG nova.compute.manager [None req-f295ae5f-e922-4fce-b1d3-5c1c11859d97 tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 906.860344] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7149ad0-89af-44ca-8941-0a038c98d01b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.049183] env[62619]: DEBUG nova.compute.manager [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Received event network-changed-9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.049525] env[62619]: DEBUG nova.compute.manager [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Refreshing instance network info cache due to event network-changed-9183d7c6-bf8f-4386-b4b0-aa8ed284959a. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 907.049879] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] Acquiring lock "refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.050145] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] Acquired lock "refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.050396] env[62619]: DEBUG nova.network.neutron [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Refreshing network info cache for port 9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 907.068083] env[62619]: DEBUG nova.compute.manager [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 907.068337] env[62619]: DEBUG nova.compute.manager [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing instance network info cache due to event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 907.068489] env[62619]: DEBUG oslo_concurrency.lockutils [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.068686] env[62619]: DEBUG oslo_concurrency.lockutils [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.068788] env[62619]: DEBUG nova.network.neutron [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 907.088302] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.138329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 907.138506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 907.138595] env[62619]: DEBUG nova.network.neutron [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 907.181631] env[62619]: DEBUG nova.network.neutron [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updated VIF entry in instance network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 907.181991] env[62619]: DEBUG nova.network.neutron [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.930670] env[62619]: DEBUG nova.network.neutron [-] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.931780] env[62619]: DEBUG oslo_concurrency.lockutils [req-cd8e1677-432f-462f-bf60-02b74e278182 req-1d061f6a-978d-4f20-81e2-1af010e96380 service nova] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.932825] env[62619]: DEBUG nova.network.neutron [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Updated VIF entry in 
instance network info cache for port b6291b55-9d26-4a33-8cef-87166b1a2c83. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 907.933137] env[62619]: DEBUG nova.network.neutron [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Updating instance_info_cache with network_info: [{"id": "b6291b55-9d26-4a33-8cef-87166b1a2c83", "address": "fa:16:3e:bb:37:8f", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb6291b55-9d", "ovs_interfaceid": "b6291b55-9d26-4a33-8cef-87166b1a2c83", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.955295] env[62619]: DEBUG oslo_vmware.api [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365093, 'name': PowerOnVM_Task, 'duration_secs': 0.635461} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.955552] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fd72c6-3291-4c18-cf51-b067c1be51c0, 'name': SearchDatastore_Task, 'duration_secs': 0.017906} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.955797] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 907.955991] env[62619]: INFO nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Took 8.85 seconds to spawn the instance on the hypervisor. 
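Several vCenter tasks complete in this stretch (PowerOnVM_Task, SearchDatastore_Task, CopyVirtualDisk_Task), each reported through the same poll loop that logs "progress is N%" until the task finishes. The sketch below imitates that loop in plain Python; fetch_task_info is a hypothetical callable standing in for the PropertyCollector query that oslo.vmware actually performs, so this is not the oslo.vmware API itself.

    import time

    def wait_for_task(fetch_task_info, poll_interval=0.5):
        # Poll until the task reaches a terminal state, mirroring the
        # "Task: {...} progress is N%" / "completed successfully" entries.
        while True:
            info = fetch_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                raise RuntimeError(info.get('error', 'task failed'))
            print("progress is %s%%" % info.get('progress', 0))
            time.sleep(poll_interval)

    # Fake task that finishes on the third poll, echoing the 0% -> 66% -> done
    # progression seen for task-1365091 above.
    states = iter([
        {'state': 'running', 'progress': 0},
        {'state': 'running', 'progress': 66},
        {'state': 'success', 'progress': 100},
    ])
    print(wait_for_task(lambda: next(states), poll_interval=0))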
[ 907.956807] env[62619]: DEBUG nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 907.958573] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42d8e944-60eb-4142-a39f-5c5aa34388a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.961392] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-83fef154-f6f7-4c76-81af-076f511ccba5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.976704] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 907.976704] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52da2a09-1a29-7044-2f3e-9572346c0e85" [ 907.976704] env[62619]: _type = "Task" [ 907.976704] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.991117] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52da2a09-1a29-7044-2f3e-9572346c0e85, 'name': SearchDatastore_Task, 'duration_secs': 0.015353} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.995168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 907.995168] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 85e279da-e067-46f8-929b-87a013c4e7f4/85e279da-e067-46f8-929b-87a013c4e7f4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 907.995168] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a1577f81-502f-491a-b410-bef9f141dedd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.007143] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 908.007143] env[62619]: value = "task-1365094" [ 908.007143] env[62619]: _type = "Task" [ 908.007143] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.009122] env[62619]: DEBUG nova.network.neutron [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 908.020556] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365094, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.104050] env[62619]: DEBUG nova.objects.instance [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lazy-loading 'pci_requests' on Instance uuid 646b4ae6-09e1-4b3c-b17d-392e746df454 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 908.160886] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c529bd6-87bb-4aea-b7b6-34d6b58ca377 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.165694] env[62619]: DEBUG nova.network.neutron [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.169580] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4bdb488-1926-405f-a49c-7e0f89a09d29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.216265] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ad2249-ae8e-4782-82cb-e1427cb008c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.226860] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8086071b-e0e7-46af-aa7d-5475fb96ebfa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.244675] env[62619]: DEBUG nova.compute.provider_tree [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 908.434899] env[62619]: INFO nova.compute.manager [-] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Took 1.80 seconds to deallocate network for instance. 
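The instance_info_cache updates in this section carry the full network_info structure as JSON. The sketch below shows how a fixed IP can be read out of that structure; the dict literal is trimmed from the cache entry for port d70279ce-58c0-45c4-9a74-8f1f74552d21 shown earlier (only the keys needed here are kept), and fixed_ips is a hypothetical helper written for this example, not part of nova.network.neutron.

    # Trimmed copy of one VIF entry from the cache update above.
    network_info = [{
        "id": "d70279ce-58c0-45c4-9a74-8f1f74552d21",
        "address": "fa:16:3e:7b:59:0e",
        "network": {
            "id": "f7c32aa4-863f-481c-899f-debfaa1844da",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "ips": [{"address": "192.168.128.8", "type": "fixed",
                         "floating_ips": []}],
            }],
        },
        "type": "ovs",
        "active": True,
    }]

    def fixed_ips(nw_info):
        # Collect (port_id, address) pairs for every fixed IP in the cache entry.
        result = []
        for vif in nw_info:
            for subnet in vif["network"]["subnets"]:
                for ip in subnet["ips"]:
                    if ip["type"] == "fixed":
                        result.append((vif["id"], ip["address"]))
        return result

    print(fixed_ips(network_info))
    # [('d70279ce-58c0-45c4-9a74-8f1f74552d21', '192.168.128.8')]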
[ 908.444441] env[62619]: DEBUG oslo_concurrency.lockutils [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] Releasing lock "refresh_cache-85e279da-e067-46f8-929b-87a013c4e7f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.444927] env[62619]: DEBUG nova.compute.manager [req-e204410c-6f9c-48d0-b2bd-c2370105ec3a req-65af7591-2767-4bbb-bffa-3b0ccb1298f3 service nova] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Received event network-vif-deleted-5f53828c-c7ba-4916-a4e8-82eef12e1166 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 908.487088] env[62619]: INFO nova.compute.manager [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Took 23.11 seconds to build instance. [ 908.502415] env[62619]: DEBUG nova.network.neutron [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [{"id": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "address": "fa:16:3e:d5:c3:bc", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0389ef1-cf", "ovs_interfaceid": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.525906] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365094, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.568322] env[62619]: DEBUG nova.network.neutron [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updated VIF entry in instance network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 908.568783] env[62619]: DEBUG nova.network.neutron [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 908.611620] env[62619]: DEBUG nova.objects.base [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Object Instance<646b4ae6-09e1-4b3c-b17d-392e746df454> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 908.611620] env[62619]: DEBUG nova.network.neutron [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 908.674244] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca7bed80-0d9b-4a17-8497-e2c1f855fbaa req-ce586d53-217c-41ea-9208-d8c4b39062f6 service nova] Releasing lock "refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 908.674661] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 908.674838] env[62619]: DEBUG nova.network.neutron [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 908.721257] env[62619]: DEBUG nova.policy [None 
req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3041343376d4f2fad14577d5c412b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4270942193cd4a9aa397784368b9ae64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 908.748624] env[62619]: DEBUG nova.scheduler.client.report [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 908.993190] env[62619]: INFO nova.compute.manager [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Took 0.56 seconds to detach 1 volumes for instance. [ 908.996437] env[62619]: DEBUG nova.compute.manager [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Deleting volume: fb506c39-d341-4d52-9b16-5d2ed093a6cc {{(pid=62619) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3247}} [ 908.997731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4048023a-b459-402c-82ee-b344bad7ecf2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 24.624s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.006220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.024223] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365094, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.550592} completed successfully. 
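The "Inventory has not changed for provider ..." entry above carries the full inventory record the resource tracker reports to Placement. As a quick check on those numbers, a minimal sketch using the standard Placement arithmetic, capacity = (total - reserved) * allocation_ratio, with max_unit as the per-instance cap:

    # Inventory copied from the report above; the capacity formula is assumed
    # to be Placement's usual (total - reserved) * allocation_ratio.
    inventory = {
        "VCPU":      {"total": 48,     "reserved": 0,   "allocation_ratio": 4.0, "max_unit": 16},
        "MEMORY_MB": {"total": 196590, "reserved": 512, "allocation_ratio": 1.0, "max_unit": 65530},
        "DISK_GB":   {"total": 400,    "reserved": 0,   "allocation_ratio": 1.0, "max_unit": 148},
    }

    for rc, inv in inventory.items():
        capacity = (inv["total"] - inv["reserved"]) * inv["allocation_ratio"]
        # max_unit limits what any single instance may claim from this provider
        print(f"{rc}: schedulable={capacity:.0f}, per-instance max_unit={inv['max_unit']}")
    # VCPU: schedulable=192, MEMORY_MB: schedulable=196078, DISK_GB: schedulable=400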
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.024397] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 85e279da-e067-46f8-929b-87a013c4e7f4/85e279da-e067-46f8-929b-87a013c4e7f4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 909.024623] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 909.025116] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-57af53cb-a459-4a96-bcbb-68be68a7fb9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.033015] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 909.033015] env[62619]: value = "task-1365095" [ 909.033015] env[62619]: _type = "Task" [ 909.033015] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.043334] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365095, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.072786] env[62619]: DEBUG oslo_concurrency.lockutils [req-6dbb5048-2925-4274-964d-cdf4bc379236 req-38ff7494-5aed-4bb8-817a-e55c0701250a service nova] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 909.233529] env[62619]: DEBUG nova.network.neutron [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 909.255087] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.614s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.262373] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.218s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 909.264018] env[62619]: INFO nova.compute.claims [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 909.286044] env[62619]: INFO nova.scheduler.client.report [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleted allocations for instance da5a8beb-0246-43df-9813-436ddf8598a9 [ 909.519157] env[62619]: DEBUG nova.network.neutron [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Updating instance_info_cache with network_info: [{"id": "9183d7c6-bf8f-4386-b4b0-aa8ed284959a", "address": "fa:16:3e:76:bb:94", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9183d7c6-bf", "ovs_interfaceid": "9183d7c6-bf8f-4386-b4b0-aa8ed284959a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.545058] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365095, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071662} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.545390] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 909.546457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dcad62a-8b92-4acb-8b28-1e7ffbda01c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.553693] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 909.573913] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Reconfiguring VM instance instance-00000052 to attach disk [datastore2] 85e279da-e067-46f8-929b-87a013c4e7f4/85e279da-e067-46f8-929b-87a013c4e7f4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 909.573913] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1245f071-ae83-4086-a442-5992c9a3f189 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.594468] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 909.594468] env[62619]: value = "task-1365097" [ 909.594468] env[62619]: _type = "Task" [ 909.594468] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.603277] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365097, 'name': ReconfigVM_Task} progress is 5%. 
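The wait_for_task / _poll_task pairs above (CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, and so on) all follow the same pattern: submit a vCenter task, then poll it until it reports success or error, logging the progress percentage on each round. A simplified, stdlib-only model of that loop; this is illustrative only, not oslo.vmware's actual implementation, and get_task_info is a hypothetical callable:

    import time

    def wait_for_task(task_ref, get_task_info, interval=0.5):
        """Poll a vCenter-style task until it finishes (simplified model).

        get_task_info is a hypothetical callable returning (state, progress)
        for a task reference such as 'task-1365097'.
        """
        while True:
            state, progress = get_task_info(task_ref)
            print(f"Task: {task_ref} progress is {progress}%")  # mirrors the log lines above
            if state == "success":
                return
            if state == "error":
                raise RuntimeError(f"{task_ref} failed")
            time.sleep(interval)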
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.793261] env[62619]: DEBUG nova.compute.manager [req-bff555aa-706e-4df5-a74a-5089d70f171e req-b33f1119-24a1-4f4d-a90d-561a462cb102 service nova] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Received event network-vif-deleted-5f9d96e1-ec69-4313-99e3-a91700248c54 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.797063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f202109c-3d8b-41ab-b78c-5ab4ed0598ba tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "da5a8beb-0246-43df-9813-436ddf8598a9" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.813s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 909.818962] env[62619]: DEBUG nova.compute.manager [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 909.819167] env[62619]: DEBUG nova.compute.manager [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing instance network info cache due to event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 909.819483] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 909.819643] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 909.819805] env[62619]: DEBUG nova.network.neutron [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 910.021208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-42aeba4e-5c87-46d5-9c7c-c6f263c69171" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.022067] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Instance network_info: |[{"id": "9183d7c6-bf8f-4386-b4b0-aa8ed284959a", "address": "fa:16:3e:76:bb:94", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9183d7c6-bf", "ovs_interfaceid": "9183d7c6-bf8f-4386-b4b0-aa8ed284959a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 910.027089] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:76:bb:94', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57c65f87-60fd-4882-ab30-31db49131b46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9183d7c6-bf8f-4386-b4b0-aa8ed284959a', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 910.034943] env[62619]: DEBUG oslo.service.loopingcall [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 910.042360] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 910.042360] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bf041f03-a809-4c10-94b4-d6bcfca25e8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.066961] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 910.066961] env[62619]: value = "task-1365098" [ 910.066961] env[62619]: _type = "Task" [ 910.066961] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.076299] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365098, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.106096] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365097, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.528295] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67a62dfc-daad-48ca-9fa5-3023401c47ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.532633] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68348f4f-ffd2-44aa-ad5e-6acb4cc7a095 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.557279] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59130f8-f49a-460c-a7bf-7f2d3db2d532 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.561369] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 910.603704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd633606-91b1-44bd-9360-1d3a99889948 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.609694] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365098, 'name': CreateVM_Task, 'duration_secs': 0.444138} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.612581] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 910.613458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.613458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.614081] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 910.614733] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d124cb6-a5f7-474f-8e66-8c5bb59a7ddc {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.620762] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365097, 'name': ReconfigVM_Task, 'duration_secs': 0.632663} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.622543] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-729eda45-12c3-44a4-a840-c34c8ea2fe0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.626510] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Reconfigured VM instance instance-00000052 to attach disk [datastore2] 85e279da-e067-46f8-929b-87a013c4e7f4/85e279da-e067-46f8-929b-87a013c4e7f4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 910.628143] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7e62a768-838c-435b-a877-c1bfe014d402 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.629670] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 910.629670] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d0ff9d-380b-2f15-6317-8fcb045b3900" [ 910.629670] env[62619]: _type = "Task" [ 910.629670] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.641479] env[62619]: DEBUG nova.compute.provider_tree [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 910.645056] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 910.645056] env[62619]: value = "task-1365099" [ 910.645056] env[62619]: _type = "Task" [ 910.645056] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.652461] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d0ff9d-380b-2f15-6317-8fcb045b3900, 'name': SearchDatastore_Task, 'duration_secs': 0.023045} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.653083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 910.653419] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 910.653602] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 910.653784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 910.654051] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 910.654546] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd57d321-5d51-4e17-bca5-2adb546fc4f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.659309] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365099, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.674217] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 910.674474] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Folder [datastore1] devstack-image-cache_base created. 
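The Acquiring/Acquired/Releasing entries around "[datastore1] devstack-image-cache_base/..." above show how concurrent builds that share one cached image are serialized with oslo.concurrency locks. A minimal sketch of that pattern using lockutils.lock(), oslo.concurrency's lock context manager; the lock name is copied from the log, and the body is a placeholder rather than Nova's actual _fetch_image_if_missing:

    from oslo_concurrency import lockutils

    # Lock name copied from the log; the body stands in for "check the image
    # cache and download the VMDK only if it is missing".
    CACHE_LOCK = "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604"

    def fetch_image_if_missing(populate_cache):
        # Only one worker at a time may inspect or populate this cache entry;
        # everyone else blocks at the 'with' until the lock is released.
        with lockutils.lock(CACHE_LOCK):
            populate_cache()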
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 910.675242] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ed913ac-5dbe-411c-97f0-2f8d52ec38eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.683550] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 910.683550] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52025627-8ad9-7cb3-6999-4ce96833a70b" [ 910.683550] env[62619]: _type = "Task" [ 910.683550] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.688701] env[62619]: DEBUG nova.network.neutron [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updated VIF entry in instance network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 910.688993] env[62619]: DEBUG nova.network.neutron [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 910.693507] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52025627-8ad9-7cb3-6999-4ce96833a70b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.833640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ec85614e-67aa-47a4-b746-f4072a49ed70 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "6c2b8244-55b6-4bc8-90c8-a59c2b0b1efe" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 910.833977] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ec85614e-67aa-47a4-b746-f4072a49ed70 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "6c2b8244-55b6-4bc8-90c8-a59c2b0b1efe" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 910.926368] env[62619]: DEBUG nova.network.neutron [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Successfully updated port: 9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 911.068478] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 911.068872] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e7f9f9f6-9aa9-4897-8cc9-d375809cb7d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.077368] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 911.077368] env[62619]: value = "task-1365100" [ 911.077368] env[62619]: _type = "Task" [ 911.077368] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.085857] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365100, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.146775] env[62619]: DEBUG nova.scheduler.client.report [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 911.169301] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365099, 'name': Rename_Task, 'duration_secs': 0.267137} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.170319] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 911.170637] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-d1a0bbe7-6fd1-47f4-aff6-6941df3712b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.179480] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 911.179480] env[62619]: value = "task-1365101" [ 911.179480] env[62619]: _type = "Task" [ 911.179480] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.194765] env[62619]: DEBUG oslo_concurrency.lockutils [req-cc8b6565-720b-45a4-aeed-949781cdb9a6 req-b013d0c3-f674-4775-a15f-a835ed31f758 service nova] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.194976] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365101, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.199120] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52025627-8ad9-7cb3-6999-4ce96833a70b, 'name': SearchDatastore_Task, 'duration_secs': 0.028021} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.200128] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9993e0e8-83a6-42cf-9269-2562c5b69b05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.206277] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 911.206277] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523d1fb4-e632-d555-c631-a764b3504e92" [ 911.206277] env[62619]: _type = "Task" [ 911.206277] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.216202] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523d1fb4-e632-d555-c631-a764b3504e92, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.337176] env[62619]: DEBUG nova.compute.manager [None req-ec85614e-67aa-47a4-b746-f4072a49ed70 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 6c2b8244-55b6-4bc8-90c8-a59c2b0b1efe] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 911.429771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.429771] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.429771] env[62619]: DEBUG nova.network.neutron [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 911.589028] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365100, 'name': PowerOffVM_Task, 'duration_secs': 0.23533} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.589028] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 911.589028] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 911.665025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.405s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.665025] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 911.666206] env[62619]: DEBUG oslo_concurrency.lockutils [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.660s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.690554] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365101, 'name': PowerOnVM_Task} progress is 64%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.725206] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523d1fb4-e632-d555-c631-a764b3504e92, 'name': SearchDatastore_Task, 'duration_secs': 0.011922} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 911.726089] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 911.726540] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 42aeba4e-5c87-46d5-9c7c-c6f263c69171/42aeba4e-5c87-46d5-9c7c-c6f263c69171.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 911.727497] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72d37238-bdf6-41e4-bae7-1fdd8e3cc081 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.739025] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 911.739025] env[62619]: value = "task-1365102" [ 911.739025] env[62619]: _type = "Task" [ 911.739025] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.751949] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365102, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.833973] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "fa4e9947-5b99-4447-9535-6dbcaba635f8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.834312] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "fa4e9947-5b99-4447-9535-6dbcaba635f8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.834547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "fa4e9947-5b99-4447-9535-6dbcaba635f8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 911.834735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "fa4e9947-5b99-4447-9535-6dbcaba635f8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 911.834906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "fa4e9947-5b99-4447-9535-6dbcaba635f8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 911.837670] env[62619]: INFO nova.compute.manager [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Terminating instance [ 911.839566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "refresh_cache-fa4e9947-5b99-4447-9535-6dbcaba635f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 911.839763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquired lock "refresh_cache-fa4e9947-5b99-4447-9535-6dbcaba635f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 911.840591] env[62619]: DEBUG nova.network.neutron [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] 
[instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 911.845019] env[62619]: DEBUG nova.compute.manager [None req-ec85614e-67aa-47a4-b746-f4072a49ed70 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 6c2b8244-55b6-4bc8-90c8-a59c2b0b1efe] Instance disappeared before build. {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2439}} [ 911.920231] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70ea0212-60c7-45ff-a491-aabc67737f7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.929504] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7612e576-8695-4a33-8b24-f4d49b81bbe3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.966232] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb309e32-27e7-4eb7-bd29-901d31b3e394 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.975878] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31c6f069-5156-457a-9632-35ad758c0015 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.991936] env[62619]: DEBUG nova.compute.provider_tree [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 911.993836] env[62619]: WARNING nova.network.neutron [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] f7c32aa4-863f-481c-899f-debfaa1844da already exists in list: networks containing: ['f7c32aa4-863f-481c-899f-debfaa1844da']. 
ignoring it [ 912.096743] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 912.097265] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 912.097265] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.097451] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 912.097534] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.097688] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 912.097893] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 912.098172] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 912.098405] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 912.098622] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 912.098842] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 912.108389] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d6f45efa-1ca8-4e48-882c-d9ecb6740bce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.128863] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 912.128863] env[62619]: value = "task-1365103" [ 912.128863] env[62619]: _type = "Task" [ 912.128863] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.139400] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365103, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.172575] env[62619]: DEBUG nova.compute.utils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.174583] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 912.174828] env[62619]: DEBUG nova.network.neutron [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 912.193057] env[62619]: DEBUG oslo_vmware.api [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365101, 'name': PowerOnVM_Task, 'duration_secs': 0.883777} completed successfully. 
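The nova.virt.hardware entries above walk the CPU-topology selection for the m1.micro flavor: with 1 vCPU, no flavor or image preference (logged as 0:0:0), and ceiling limits of 65536 sockets/cores/threads, the only factorization is 1 socket x 1 core x 1 thread. A simplified sketch of that enumeration, not the real nova.virt.hardware code (which also handles preferences and NUMA constraints):

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        """Enumerate (sockets, cores, threads) whose product equals vcpus."""
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            for cores in range(1, min(vcpus, max_cores) + 1):
                for threads in range(1, min(vcpus, max_threads) + 1):
                    if sockets * cores * threads == vcpus:
                        found.append((sockets, cores, threads))
        return found

    print(possible_topologies(1))  # [(1, 1, 1)] -- the single topology logged above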
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.193427] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 912.193762] env[62619]: INFO nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Took 10.72 seconds to spawn the instance on the hypervisor. [ 912.193998] env[62619]: DEBUG nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 912.194876] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bbac028-11df-43aa-91ae-5b9a3716a889 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.250669] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365102, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.323962] env[62619]: DEBUG nova.policy [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1e90a23c6444273bc10051f3227804c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '998daea123aa48b2816d1cbe9e662950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 912.361450] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ec85614e-67aa-47a4-b746-f4072a49ed70 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "6c2b8244-55b6-4bc8-90c8-a59c2b0b1efe" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 1.527s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.372196] env[62619]: DEBUG nova.network.neutron [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 912.451738] env[62619]: DEBUG nova.network.neutron [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.497542] env[62619]: DEBUG nova.scheduler.client.report [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 912.642438] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365103, 'name': ReconfigVM_Task, 'duration_secs': 0.431915} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.646015] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 912.662314] env[62619]: DEBUG nova.compute.manager [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-vif-plugged-9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.662918] env[62619]: DEBUG oslo_concurrency.lockutils [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 912.663117] env[62619]: DEBUG oslo_concurrency.lockutils [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 912.663317] env[62619]: DEBUG oslo_concurrency.lockutils [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 912.663508] env[62619]: DEBUG nova.compute.manager [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] No waiting events found dispatching network-vif-plugged-9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 912.663684] env[62619]: WARNING nova.compute.manager [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received unexpected event network-vif-plugged-9413dc32-2f0c-4650-952a-63bed028a099 for instance with vm_state active and task_state None. [ 912.663855] env[62619]: DEBUG nova.compute.manager [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-changed-9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.664008] env[62619]: DEBUG nova.compute.manager [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing instance network info cache due to event network-changed-9413dc32-2f0c-4650-952a-63bed028a099. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 912.665479] env[62619]: DEBUG oslo_concurrency.lockutils [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.678241] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 912.688244] env[62619]: DEBUG nova.compute.manager [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 912.691135] env[62619]: DEBUG nova.compute.manager [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing instance network info cache due to event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 912.691135] env[62619]: DEBUG oslo_concurrency.lockutils [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 912.691135] env[62619]: DEBUG oslo_concurrency.lockutils [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 912.691135] env[62619]: DEBUG nova.network.neutron [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 912.726651] env[62619]: INFO nova.compute.manager [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Took 26.92 seconds to build instance. [ 912.756621] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365102, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.679532} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.757355] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 42aeba4e-5c87-46d5-9c7c-c6f263c69171/42aeba4e-5c87-46d5-9c7c-c6f263c69171.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 912.757906] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 912.758044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-29f3a366-0e76-457f-916b-75c6a4c03f48 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.768548] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 912.768548] env[62619]: value = "task-1365104" [ 912.768548] env[62619]: _type = "Task" [ 912.768548] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.782127] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365104, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.829678] env[62619]: DEBUG nova.network.neutron [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9413dc32-2f0c-4650-952a-63bed028a099", "address": "fa:16:3e:5b:0a:fa", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9413dc32-2f", "ovs_interfaceid": "9413dc32-2f0c-4650-952a-63bed028a099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 912.954637] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Releasing lock 
"refresh_cache-fa4e9947-5b99-4447-9535-6dbcaba635f8" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 912.955509] env[62619]: DEBUG nova.compute.manager [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 912.955878] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 912.957707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6500c54-2e9e-40d6-8586-2f6d2d99e5c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.970488] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 912.970879] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e1f1b7ff-b933-4e88-856b-de20675e8fda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.981254] env[62619]: DEBUG oslo_vmware.api [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 912.981254] env[62619]: value = "task-1365105" [ 912.981254] env[62619]: _type = "Task" [ 912.981254] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.994534] env[62619]: DEBUG oslo_vmware.api [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365105, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.995514] env[62619]: DEBUG nova.network.neutron [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Successfully created port: 5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 913.005976] env[62619]: DEBUG oslo_concurrency.lockutils [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.340s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.006098] env[62619]: INFO nova.compute.manager [None req-702a8fbe-cd43-4dc9-aeaf-49067616a2ed tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] Successfully reverted task state from rebuilding on failure for instance. [ 913.012124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.319s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 913.012378] env[62619]: DEBUG nova.objects.instance [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lazy-loading 'resources' on Instance uuid 59b960b6-aa41-4409-a899-9829388c3ff2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 913.150800] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.151120] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.151628] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:352}} [ 913.151855] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.152034] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.152193] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.152420] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.152639] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.152842] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.153029] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.153215] env[62619]: DEBUG nova.virt.hardware [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.160057] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Reconfiguring VM instance instance-0000004e to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 913.160702] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03564e1f-1d71-4a2e-916b-b900ef8932c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.183402] env[62619]: DEBUG oslo_vmware.api [None 
req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 913.183402] env[62619]: value = "task-1365106" [ 913.183402] env[62619]: _type = "Task" [ 913.183402] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.199141] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365106, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.229677] env[62619]: DEBUG oslo_concurrency.lockutils [None req-343a2437-8101-48c6-8a75-8f6b0e6b5e8c tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "85e279da-e067-46f8-929b-87a013c4e7f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 28.435s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 913.279882] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365104, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.283209} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.280146] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 913.280997] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-392a753c-7529-4b55-8f35-2d639ec44426 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.306801] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Reconfiguring VM instance instance-00000053 to attach disk [datastore1] 42aeba4e-5c87-46d5-9c7c-c6f263c69171/42aeba4e-5c87-46d5-9c7c-c6f263c69171.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.307145] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7d9e63f6-842a-4f19-b664-4aa1b83d67de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.327855] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 913.327855] env[62619]: value = "task-1365107" [ 913.327855] env[62619]: _type = "Task" [ 913.327855] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.333015] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 913.333734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 913.333897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.337393] env[62619]: DEBUG oslo_concurrency.lockutils [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 913.337590] env[62619]: DEBUG nova.network.neutron [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing network info cache for port 9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 913.339369] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7183f50-a3c5-4f52-a2d0-3190a260102d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.342317] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365107, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.361558] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.361886] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.362099] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 913.362469] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.362575] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.362802] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.363036] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.363517] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.363765] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Got 1 possible 
topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.363939] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.364134] env[62619]: DEBUG nova.virt.hardware [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.372281] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 913.373263] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ba86ee0d-9da4-40af-8bfe-a6023d582ce7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.395719] env[62619]: DEBUG oslo_vmware.api [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 913.395719] env[62619]: value = "task-1365108" [ 913.395719] env[62619]: _type = "Task" [ 913.395719] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.408394] env[62619]: DEBUG oslo_vmware.api [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365108, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.493613] env[62619]: DEBUG oslo_vmware.api [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365105, 'name': PowerOffVM_Task, 'duration_secs': 0.340555} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.494013] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 913.494212] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 913.494479] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a9ae5f13-cb71-43d6-9160-89ef6a8938e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.532066] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 913.532327] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 913.532528] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleting the datastore file [datastore1] fa4e9947-5b99-4447-9535-6dbcaba635f8 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.532822] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-74fb8f55-dfbb-4f9c-a6e4-49d9faa9f3bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.542950] env[62619]: DEBUG oslo_vmware.api [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for the task: (returnval){ [ 913.542950] env[62619]: value = "task-1365110" [ 913.542950] env[62619]: _type = "Task" [ 913.542950] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.552197] env[62619]: DEBUG oslo_vmware.api [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365110, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.692529] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 913.701325] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365106, 'name': ReconfigVM_Task, 'duration_secs': 0.377722} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.704744] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Reconfigured VM instance instance-0000004e to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 913.705919] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-221b1aaa-2562-424f-bf8a-a512fb71171a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.736609] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Reconfiguring VM instance instance-0000004e to attach disk [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6/11542a9b-6556-4b4b-88fe-26c6be2969f6.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 913.738030] env[62619]: DEBUG nova.network.neutron [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updated VIF entry in instance network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 913.739180] env[62619]: DEBUG nova.network.neutron [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.149", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 913.744775] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f0e644f5-c34e-4164-bd67-130b54f39646 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.768169] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='ce0117ca9d2cf7f58ae1cad24b1ef8fe',container_format='bare',created_at=2024-10-25T17:05:12Z,direct_url=,disk_format='vmdk',id=4be035b6-4033-4f5c-9c02-c3d1968a1982,min_disk=1,min_ram=0,name='tempest-test-snap-576615866',owner='998daea123aa48b2816d1cbe9e662950',properties=ImageMetaProps,protected=,size=21334016,status='active',tags=,updated_at=2024-10-25T17:05:28Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 913.768480] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 913.768688] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
913.768889] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 913.769054] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 913.769203] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 913.769550] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 913.769767] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 913.769969] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 913.770164] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 913.770404] env[62619]: DEBUG nova.virt.hardware [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 913.771705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd51b82c-fa07-46f8-9027-f0a801023155 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.775892] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 913.775892] env[62619]: value = "task-1365111" [ 913.775892] env[62619]: _type = "Task" [ 913.775892] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.795964] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365111, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.800855] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22dd96a7-e1cc-4d45-9349-b32d6e082d76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.834668] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f2a358a-920a-4953-bf4a-dc5250ee2be8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.846934] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44acc37d-9c7b-4d1e-a31c-abc1def84956 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.853230] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365107, 'name': ReconfigVM_Task, 'duration_secs': 0.505306} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.853608] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Reconfigured VM instance instance-00000053 to attach disk [datastore1] 42aeba4e-5c87-46d5-9c7c-c6f263c69171/42aeba4e-5c87-46d5-9c7c-c6f263c69171.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 913.854970] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-bf7ea454-ae53-4b49-ab43-d2d943870902 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.889691] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef003c0d-49ba-40c6-bafc-4ea33cdba99a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.893125] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 913.893125] env[62619]: value = "task-1365112" [ 913.893125] env[62619]: _type = "Task" [ 913.893125] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.907976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7df0f5-50ce-45fc-914f-cea87bf51a7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.915879] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365112, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.931309] env[62619]: DEBUG nova.compute.provider_tree [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 913.932972] env[62619]: DEBUG oslo_vmware.api [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365108, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.055673] env[62619]: DEBUG oslo_vmware.api [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Task: {'id': task-1365110, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.191578} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.056225] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.056490] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 914.056718] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 914.056938] env[62619]: INFO nova.compute.manager [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Took 1.10 seconds to destroy the instance on the hypervisor. [ 914.057472] env[62619]: DEBUG oslo.service.loopingcall [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.057752] env[62619]: DEBUG nova.compute.manager [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 914.057864] env[62619]: DEBUG nova.network.neutron [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 914.081820] env[62619]: DEBUG nova.network.neutron [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 914.115070] env[62619]: DEBUG nova.network.neutron [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updated VIF entry in instance network info cache for port 9413dc32-2f0c-4650-952a-63bed028a099. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 914.115568] env[62619]: DEBUG nova.network.neutron [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9413dc32-2f0c-4650-952a-63bed028a099", "address": "fa:16:3e:5b:0a:fa", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, 
"bound_drivers": {"0": "nsxv3"}}, "devname": "tap9413dc32-2f", "ovs_interfaceid": "9413dc32-2f0c-4650-952a-63bed028a099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.261346] env[62619]: DEBUG oslo_concurrency.lockutils [req-4eb51396-f792-4771-8822-6c5861811ed2 req-a10ca531-5e19-4b9c-b41c-daedc61adc6c service nova] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.289226] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365111, 'name': ReconfigVM_Task, 'duration_secs': 0.496176} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.289583] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Reconfigured VM instance instance-0000004e to attach disk [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6/11542a9b-6556-4b4b-88fe-26c6be2969f6.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 914.289887] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 914.411197] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365112, 'name': Rename_Task, 'duration_secs': 0.160659} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.411949] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 914.412259] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-dc981cce-ce53-4348-87c9-7b49b1c71b69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.417704] env[62619]: DEBUG oslo_vmware.api [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365108, 'name': ReconfigVM_Task, 'duration_secs': 0.787871} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.418595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.419294] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 914.426100] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 914.426100] env[62619]: value = "task-1365113" [ 914.426100] env[62619]: _type = "Task" [ 914.426100] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 914.435897] env[62619]: DEBUG nova.scheduler.client.report [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 914.439681] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365113, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.583828] env[62619]: DEBUG nova.network.neutron [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.618621] env[62619]: DEBUG oslo_concurrency.lockutils [req-306c5512-a33c-46fb-879c-3f5362461aa2 req-7fcf3706-0e0c-44cc-912c-3b3ce0a26dc5 service nova] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 914.803400] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38358cdb-3457-4aa4-b5c7-b566ba577367 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.831245] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-843228a9-1a9b-4350-945c-59bd99d2779b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.854204] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 914.924411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2c21ae8a-7354-4e94-a6fa-70f02e26c19e tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-646b4ae6-09e1-4b3c-b17d-392e746df454-9413dc32-2f0c-4650-952a-63bed028a099" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 8.268s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.937335] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365113, 'name': PowerOnVM_Task} progress is 90%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 914.942361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 914.945621] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.392s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 914.945621] env[62619]: DEBUG nova.objects.instance [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lazy-loading 'resources' on Instance uuid c30e0db3-9b63-44b7-9b7f-810defc530d1 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 914.971559] env[62619]: INFO nova.scheduler.client.report [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleted allocations for instance 59b960b6-aa41-4409-a899-9829388c3ff2 [ 915.086854] env[62619]: INFO nova.compute.manager [-] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Took 1.03 seconds to deallocate network for instance. [ 915.098877] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "85e279da-e067-46f8-929b-87a013c4e7f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.099395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "85e279da-e067-46f8-929b-87a013c4e7f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.099395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "85e279da-e067-46f8-929b-87a013c4e7f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.099556] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "85e279da-e067-46f8-929b-87a013c4e7f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.099726] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "85e279da-e067-46f8-929b-87a013c4e7f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.107163] env[62619]: INFO nova.compute.manager [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Terminating instance [ 915.107633] env[62619]: DEBUG nova.compute.manager [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 915.109160] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 915.109160] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0203005a-05b6-4003-a96a-afb2696e47a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.119960] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 915.120417] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-571a12c5-4d48-4424-b6d1-3eb527457475 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.130025] env[62619]: DEBUG oslo_vmware.api [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 915.130025] env[62619]: value = "task-1365114" [ 915.130025] env[62619]: _type = "Task" [ 915.130025] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.143396] env[62619]: DEBUG oslo_vmware.api [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365114, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.168992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.168992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.170904] env[62619]: DEBUG nova.network.neutron [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Successfully updated port: 5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 915.395947] env[62619]: DEBUG nova.network.neutron [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Port a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 915.438969] env[62619]: DEBUG oslo_vmware.api [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365113, 'name': PowerOnVM_Task, 'duration_secs': 0.649275} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.439356] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 915.439703] env[62619]: INFO nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Took 9.67 seconds to spawn the instance on the hypervisor. 
[ 915.439945] env[62619]: DEBUG nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 915.440784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20fe054-052a-4841-b8eb-7e1a0949c153 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.487296] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8707113d-f2dc-4cc8-8e3c-3c021e0905ea tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "59b960b6-aa41-4409-a899-9829388c3ff2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.205s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.604647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.639011] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afbb6ebd-b644-4824-99ad-308ac47e1366 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.644358] env[62619]: DEBUG oslo_vmware.api [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365114, 'name': PowerOffVM_Task, 'duration_secs': 0.276856} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 915.645012] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 915.645235] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 915.645496] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-244df28c-e0fc-42b0-b543-ad32960c35b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.649707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990fe03d-7898-46f9-b7a7-291c90e59c02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.681841] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 915.684710] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-ef1e80cf-2ea2-4764-851a-8aa97563a278" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.684847] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-ef1e80cf-2ea2-4764-851a-8aa97563a278" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.685299] env[62619]: DEBUG nova.network.neutron [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 915.686911] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c563e508-b73e-4093-9848-dd8ff9fdade6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.696418] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9776ecd3-0f38-4651-a10f-0ae1e1d5ae39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.711504] env[62619]: DEBUG nova.compute.provider_tree [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Inventory has not changed in 
ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 915.730663] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 915.731025] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 915.731337] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleting the datastore file [datastore2] 85e279da-e067-46f8-929b-87a013c4e7f4 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 915.732012] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-305cfc34-9660-480e-9b69-bdf978cf8caa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 915.744018] env[62619]: DEBUG oslo_vmware.api [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 915.744018] env[62619]: value = "task-1365116" [ 915.744018] env[62619]: _type = "Task" [ 915.744018] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 915.753483] env[62619]: DEBUG oslo_vmware.api [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365116, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 915.782166] env[62619]: DEBUG nova.compute.manager [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.782656] env[62619]: DEBUG nova.compute.manager [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing instance network info cache due to event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 915.782656] env[62619]: DEBUG oslo_concurrency.lockutils [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 915.782815] env[62619]: DEBUG oslo_concurrency.lockutils [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 915.782940] env[62619]: DEBUG nova.network.neutron [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 915.810327] env[62619]: DEBUG nova.compute.manager [req-24807012-9dff-445d-9713-ee2246eab160 req-d6154035-c211-4ccb-895e-6b658e415878 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Received event network-vif-plugged-5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 915.811676] env[62619]: DEBUG oslo_concurrency.lockutils [req-24807012-9dff-445d-9713-ee2246eab160 req-d6154035-c211-4ccb-895e-6b658e415878 service nova] Acquiring lock "ef1e80cf-2ea2-4764-851a-8aa97563a278-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 915.814522] env[62619]: DEBUG oslo_concurrency.lockutils [req-24807012-9dff-445d-9713-ee2246eab160 req-d6154035-c211-4ccb-895e-6b658e415878 service nova] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 915.814522] env[62619]: DEBUG oslo_concurrency.lockutils [req-24807012-9dff-445d-9713-ee2246eab160 req-d6154035-c211-4ccb-895e-6b658e415878 service nova] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 915.814522] env[62619]: DEBUG nova.compute.manager [req-24807012-9dff-445d-9713-ee2246eab160 req-d6154035-c211-4ccb-895e-6b658e415878 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] No waiting events found dispatching network-vif-plugged-5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 915.814522] env[62619]: WARNING nova.compute.manager [req-24807012-9dff-445d-9713-ee2246eab160 req-d6154035-c211-4ccb-895e-6b658e415878 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Received unexpected event network-vif-plugged-5f723445-4135-4034-b144-6e0a4b2f67fb for instance with vm_state building and task_state spawning. 
[ 915.959889] env[62619]: INFO nova.compute.manager [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Took 22.10 seconds to build instance. [ 916.213774] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.215356] env[62619]: DEBUG nova.scheduler.client.report [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 916.228100] env[62619]: DEBUG nova.network.neutron [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 916.260466] env[62619]: DEBUG oslo_vmware.api [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365116, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425072} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 916.260466] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 916.260466] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 916.260466] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 916.260952] env[62619]: INFO nova.compute.manager [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 916.260952] env[62619]: DEBUG oslo.service.loopingcall [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 916.261196] env[62619]: DEBUG nova.compute.manager [-] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 916.261314] env[62619]: DEBUG nova.network.neutron [-] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 916.422758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.423063] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.423671] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.462674] env[62619]: DEBUG oslo_concurrency.lockutils [None req-6bac82cd-a32f-4a48-83ff-6bee2fed6014 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.607s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.525908] env[62619]: DEBUG nova.network.neutron [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Updating instance_info_cache with network_info: [{"id": "5f723445-4135-4034-b144-6e0a4b2f67fb", "address": "fa:16:3e:6d:02:2b", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": 
"nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f723445-41", "ovs_interfaceid": "5f723445-4135-4034-b144-6e0a4b2f67fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.571269] env[62619]: DEBUG nova.network.neutron [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updated VIF entry in instance network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 916.571547] env[62619]: DEBUG nova.network.neutron [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 916.721339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.723750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.119s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.724025] env[62619]: DEBUG nova.objects.instance [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lazy-loading 'resources' on Instance uuid fa4e9947-5b99-4447-9535-6dbcaba635f8 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 916.742324] env[62619]: WARNING 
oslo_messaging._drivers.amqpdriver [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Number of call queues is 11, greater than warning threshold: 10. There could be a leak. Increasing threshold to: 20 [ 916.865607] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "ca452ef6-d777-46dd-a313-ae7dd441adca" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.865894] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.866132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "ca452ef6-d777-46dd-a313-ae7dd441adca-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.866324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.866497] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.870964] env[62619]: INFO nova.compute.manager [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Terminating instance [ 916.873411] env[62619]: DEBUG nova.compute.manager [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 916.873789] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 916.875115] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-412ccdc2-f71a-44ec-ab98-ac8a2a0bad9b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.883658] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 916.884041] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d1e2ca78-86da-4698-8a8a-2556e365c0d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.891275] env[62619]: DEBUG oslo_vmware.api [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 916.891275] env[62619]: value = "task-1365117" [ 916.891275] env[62619]: _type = "Task" [ 916.891275] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.895071] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.895247] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.895454] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.895625] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.895791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 916.899258] env[62619]: INFO nova.compute.manager [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Terminating instance [ 916.905165] env[62619]: DEBUG oslo_vmware.api [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365117, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.905980] env[62619]: DEBUG nova.compute.manager [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 916.906245] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 916.907223] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-896b7924-d54f-4e87-adda-8789195d65d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.916033] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 916.918625] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b5feef22-ad7b-46b4-ab6a-eb8aed20d5b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 916.926139] env[62619]: DEBUG oslo_vmware.api [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 916.926139] env[62619]: value = "task-1365118" [ 916.926139] env[62619]: _type = "Task" [ 916.926139] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 916.936016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.936376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 916.943093] env[62619]: DEBUG oslo_vmware.api [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365118, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 916.971710] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "interface-646b4ae6-09e1-4b3c-b17d-392e746df454-9413dc32-2f0c-4650-952a-63bed028a099" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 916.974277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-646b4ae6-09e1-4b3c-b17d-392e746df454-9413dc32-2f0c-4650-952a-63bed028a099" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.023840] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.024141] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.024369] env[62619]: DEBUG nova.compute.manager [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 
42aeba4e-5c87-46d5-9c7c-c6f263c69171] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 917.025284] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51e61c9a-fd83-4711-8349-74e7f200b524 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.028779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-ef1e80cf-2ea2-4764-851a-8aa97563a278" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.029082] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Instance network_info: |[{"id": "5f723445-4135-4034-b144-6e0a4b2f67fb", "address": "fa:16:3e:6d:02:2b", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f723445-41", "ovs_interfaceid": "5f723445-4135-4034-b144-6e0a4b2f67fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 917.029491] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:02:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5f723445-4135-4034-b144-6e0a4b2f67fb', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 917.037156] env[62619]: DEBUG oslo.service.loopingcall [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 917.037870] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 917.038143] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-98101d92-d0c8-4db8-9e43-ebd5cb35b823 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.055236] env[62619]: DEBUG nova.compute.manager [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 917.055897] env[62619]: DEBUG nova.objects.instance [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'flavor' on Instance uuid 42aeba4e-5c87-46d5-9c7c-c6f263c69171 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 917.063974] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 917.063974] env[62619]: value = "task-1365119" [ 917.063974] env[62619]: _type = "Task" [ 917.063974] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.075103] env[62619]: DEBUG oslo_concurrency.lockutils [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 917.075354] env[62619]: DEBUG nova.compute.manager [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.075545] env[62619]: DEBUG nova.compute.manager [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing instance network info cache due to event network-changed-539b4aac-fc64-4dc2-a020-cf6440bd00d8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 917.075762] env[62619]: DEBUG oslo_concurrency.lockutils [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] Acquiring lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.075908] env[62619]: DEBUG oslo_concurrency.lockutils [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] Acquired lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.076087] env[62619]: DEBUG nova.network.neutron [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Refreshing network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 917.077199] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365119, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.188566] env[62619]: DEBUG nova.network.neutron [-] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.247542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ffc5966-3938-422f-b53c-3f940292934a tempest-ServerActionsV293TestJSON-959671216 tempest-ServerActionsV293TestJSON-959671216-project-member] Lock "c30e0db3-9b63-44b7-9b7f-810defc530d1" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.711s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 917.404869] env[62619]: DEBUG oslo_vmware.api [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365117, 'name': PowerOffVM_Task, 'duration_secs': 0.246023} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.405037] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.405234] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.405456] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e3597575-285e-42b1-9958-4557a3f26b5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.440740] env[62619]: DEBUG oslo_vmware.api [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365118, 'name': PowerOffVM_Task, 'duration_secs': 0.280129} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.445671] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 917.446023] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 917.446515] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 917.451942] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f47ae300-e804-4f77-b591-0a6514f10199 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.467532] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-01ba85e6-32cf-47fa-86c5-52f148a1dc58 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.476376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.476566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.477439] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db47e0e-ec38-4ef7-b7aa-3b89c03ea25f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.480750] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b73ec51f-f228-4ddf-8edb-ca6199dd5dc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.489707] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.489885] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.490118] env[62619]: DEBUG nova.network.neutron [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 917.494541] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.494839] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 
tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.494987] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleting the datastore file [datastore2] ca452ef6-d777-46dd-a313-ae7dd441adca {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.495776] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-581b0b0f-bee1-4604-95d4-10dae795f6f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.539662] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-032b10c9-f107-4097-9cf2-aca15c5c8d32 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.543680] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4b8c46-13a4-4295-9e96-46d4ca04646b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.548704] env[62619]: DEBUG oslo_vmware.api [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for the task: (returnval){ [ 917.548704] env[62619]: value = "task-1365122" [ 917.548704] env[62619]: _type = "Task" [ 917.548704] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.550018] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 917.550256] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 917.550456] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Deleting the datastore file [datastore2] 33998dc6-3be4-4b78-af12-0ad7bfab70c6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 917.554406] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-afa201c7-a919-4460-81df-0a2f538a099f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.584448] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Reconfiguring VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 917.585150] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 917.587242] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dc8f226-9828-482b-8eef-0911435e6f8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.598662] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bdabbbf2-21bd-4bf4-bb85-4b0a9ba08ca5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.614820] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0fcb0960-1913-4b5f-8995-e2e405b7b4a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.623011] env[62619]: DEBUG oslo_vmware.api [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365122, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.623430] env[62619]: DEBUG oslo_vmware.api [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for the task: (returnval){ [ 917.623430] env[62619]: value = "task-1365123" [ 917.623430] env[62619]: _type = "Task" [ 917.623430] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.645567] env[62619]: DEBUG nova.compute.provider_tree [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 917.647094] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365119, 'name': CreateVM_Task, 'duration_secs': 0.521738} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 917.649409] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 917.649824] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 917.649824] env[62619]: value = "task-1365124" [ 917.649824] env[62619]: _type = "Task" [ 917.649824] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.650505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.650690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.651132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 917.651541] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd5c66a6-76e7-4aa8-806f-74248108c484 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 917.661203] env[62619]: DEBUG oslo_vmware.api [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 917.661203] env[62619]: value = "task-1365125" [ 917.661203] env[62619]: _type = "Task" [ 917.661203] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.661556] env[62619]: DEBUG oslo_vmware.api [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365123, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.680284] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 917.680284] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5254d965-9a46-aeae-37ba-b374a320988e" [ 917.680284] env[62619]: _type = "Task" [ 917.680284] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 917.680566] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.688444] env[62619]: DEBUG oslo_vmware.api [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365125, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.691522] env[62619]: INFO nova.compute.manager [-] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Took 1.43 seconds to deallocate network for instance. [ 917.697288] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5254d965-9a46-aeae-37ba-b374a320988e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 917.772109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 917.772369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 917.884294] env[62619]: DEBUG nova.compute.manager [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Received event network-changed-5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 917.884496] env[62619]: DEBUG nova.compute.manager [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Refreshing instance network info cache due to event network-changed-5f723445-4135-4034-b144-6e0a4b2f67fb. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 917.884719] env[62619]: DEBUG oslo_concurrency.lockutils [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] Acquiring lock "refresh_cache-ef1e80cf-2ea2-4764-851a-8aa97563a278" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 917.884990] env[62619]: DEBUG oslo_concurrency.lockutils [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] Acquired lock "refresh_cache-ef1e80cf-2ea2-4764-851a-8aa97563a278" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 917.885094] env[62619]: DEBUG nova.network.neutron [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Refreshing network info cache for port 5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 917.891423] env[62619]: DEBUG nova.network.neutron [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updated VIF entry in instance network info cache for port 539b4aac-fc64-4dc2-a020-cf6440bd00d8. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 917.891982] env[62619]: DEBUG nova.network.neutron [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [{"id": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "address": "fa:16:3e:8a:b5:5d", "network": {"id": "774d2a24-62f2-476f-b188-55ab9c218904", "bridge": "br-int", "label": "tempest-ServerRescueTestJSONUnderV235-1966786016-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": false}}], "meta": {"injected": false, "tenant_id": "4ef452b71f2d4d248f5d016b2076508f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "40c947c4-f471-4d48-8e43-fee54198107e", "external-id": "nsx-vlan-transportzone-203", "segmentation_id": 203, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap539b4aac-fc", "ovs_interfaceid": "539b4aac-fc64-4dc2-a020-cf6440bd00d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 917.972058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.060085] env[62619]: DEBUG oslo_vmware.api [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 
tempest-ListServerFiltersTestJSON-502742612-project-member] Task: {'id': task-1365122, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.176005} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.060365] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.060561] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.060744] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.060917] env[62619]: INFO nova.compute.manager [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Took 1.19 seconds to destroy the instance on the hypervisor. [ 918.061186] env[62619]: DEBUG oslo.service.loopingcall [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.063730] env[62619]: DEBUG nova.compute.manager [-] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 918.063730] env[62619]: DEBUG nova.network.neutron [-] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 918.138574] env[62619]: DEBUG oslo_vmware.api [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Task: {'id': task-1365123, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.227212} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.138901] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 918.139144] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 918.139391] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 918.139633] env[62619]: INFO nova.compute.manager [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Took 1.23 seconds to destroy the instance on the hypervisor. [ 918.139879] env[62619]: DEBUG oslo.service.loopingcall [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 918.140154] env[62619]: DEBUG nova.compute.manager [-] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 918.140280] env[62619]: DEBUG nova.network.neutron [-] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 918.149147] env[62619]: DEBUG nova.scheduler.client.report [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 918.163332] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.174510] env[62619]: DEBUG oslo_vmware.api [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365125, 'name': PowerOffVM_Task, 'duration_secs': 0.202947} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 918.175827] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 918.175827] env[62619]: DEBUG nova.compute.manager [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 918.176858] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f607e85a-a012-43b3-a6ae-b5624517e8b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.196476] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.196809] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Processing image 4be035b6-4033-4f5c-9c02-c3d1968a1982 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 918.197134] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 918.197342] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 918.197562] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 918.197864] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-36af31bd-cedd-47d8-a4a7-7b713b803b0a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.202833] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.209844] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 918.210150] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 918.210889] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57b7debb-c69d-428f-a2e6-33003ad38a77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.217392] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 918.217392] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5239506e-8447-03ae-ba91-ec38411533fd" [ 918.217392] env[62619]: _type = "Task" [ 918.217392] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 918.226952] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5239506e-8447-03ae-ba91-ec38411533fd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.267986] env[62619]: DEBUG nova.network.neutron [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [{"id": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "address": "fa:16:3e:d5:c3:bc", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0389ef1-cf", "ovs_interfaceid": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.275105] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 918.391368] env[62619]: DEBUG nova.compute.manager [req-535f9c28-8e76-4ee6-8392-03d8c9f98b50 req-cbccff59-ce70-4316-b8dc-87cf0ac0b8eb service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Received event network-vif-deleted-6ecf8988-6ce6-4b80-b927-57c2ef3a8100 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 918.391756] env[62619]: INFO nova.compute.manager [req-535f9c28-8e76-4ee6-8392-03d8c9f98b50 req-cbccff59-ce70-4316-b8dc-87cf0ac0b8eb service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Neutron deleted interface 6ecf8988-6ce6-4b80-b927-57c2ef3a8100; detaching it from the instance and deleting it from the info cache [ 918.392095] env[62619]: DEBUG nova.network.neutron [req-535f9c28-8e76-4ee6-8392-03d8c9f98b50 req-cbccff59-ce70-4316-b8dc-87cf0ac0b8eb service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.395764] env[62619]: DEBUG oslo_concurrency.lockutils [req-b61f9e8d-e265-4a0f-aad9-bd2bcb63e517 req-59c88758-8bc3-4b33-87ce-a9b24a1049f6 service nova] Releasing lock "refresh_cache-33998dc6-3be4-4b78-af12-0ad7bfab70c6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.607851] env[62619]: DEBUG nova.network.neutron [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Updated VIF entry in instance network info cache for port 5f723445-4135-4034-b144-6e0a4b2f67fb. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 918.607851] env[62619]: DEBUG nova.network.neutron [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Updating instance_info_cache with network_info: [{"id": "5f723445-4135-4034-b144-6e0a4b2f67fb", "address": "fa:16:3e:6d:02:2b", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5f723445-41", "ovs_interfaceid": "5f723445-4135-4034-b144-6e0a4b2f67fb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.654927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock 
"compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.930s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.660758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 2.447s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 918.662429] env[62619]: INFO nova.compute.claims [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 918.672907] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 918.680570] env[62619]: INFO nova.scheduler.client.report [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Deleted allocations for instance fa4e9947-5b99-4447-9535-6dbcaba635f8 [ 918.692928] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d8888d7b-feeb-4b7e-860c-8359c41aa58c tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.669s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 918.728778] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 918.729070] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Fetch image to [datastore1] OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab/OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 918.729279] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Downloading stream optimized image 4be035b6-4033-4f5c-9c02-c3d1968a1982 to [datastore1] OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab/OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 918.729543] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] 
[instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Downloading image file data 4be035b6-4033-4f5c-9c02-c3d1968a1982 to the ESX as VM named 'OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 918.774138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 918.798547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 918.812854] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 918.812854] env[62619]: value = "resgroup-9" [ 918.812854] env[62619]: _type = "ResourcePool" [ 918.812854] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 918.813191] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-f4e32974-228d-4a22-a152-3898a9406e8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.838330] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lease: (returnval){ [ 918.838330] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5204594d-cd0b-bfe6-5d7a-d087b8d24397" [ 918.838330] env[62619]: _type = "HttpNfcLease" [ 918.838330] env[62619]: } obtained for vApp import into resource pool (val){ [ 918.838330] env[62619]: value = "resgroup-9" [ 918.838330] env[62619]: _type = "ResourcePool" [ 918.838330] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 918.838684] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the lease: (returnval){ [ 918.838684] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5204594d-cd0b-bfe6-5d7a-d087b8d24397" [ 918.838684] env[62619]: _type = "HttpNfcLease" [ 918.838684] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 918.846960] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 918.846960] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5204594d-cd0b-bfe6-5d7a-d087b8d24397" [ 918.846960] env[62619]: _type = "HttpNfcLease" [ 918.846960] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 918.866295] env[62619]: DEBUG nova.network.neutron [-] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.894765] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d5dc07de-ea7c-46aa-8df0-4ba270d5f931 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.905547] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da5f4ad3-6c6e-4505-b31f-744135694f53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 918.938210] env[62619]: DEBUG nova.network.neutron [-] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 918.939709] env[62619]: DEBUG nova.compute.manager [req-535f9c28-8e76-4ee6-8392-03d8c9f98b50 req-cbccff59-ce70-4316-b8dc-87cf0ac0b8eb service nova] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Detach interface failed, port_id=6ecf8988-6ce6-4b80-b927-57c2ef3a8100, reason: Instance ca452ef6-d777-46dd-a313-ae7dd441adca could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 919.110796] env[62619]: DEBUG oslo_concurrency.lockutils [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] Releasing lock "refresh_cache-ef1e80cf-2ea2-4764-851a-8aa97563a278" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 919.111190] env[62619]: DEBUG nova.compute.manager [req-968331e1-1773-4020-9252-7f3674bc4d84 req-8cd24239-8cfa-4658-870f-76cd91858319 service nova] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Received event network-vif-deleted-b6291b55-9d26-4a33-8cef-87166b1a2c83 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.171881] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.190533] env[62619]: DEBUG oslo_concurrency.lockutils [None req-28003c19-1818-4cef-bc3e-0640d1774efe tempest-ServerShowV247Test-792032551 tempest-ServerShowV247Test-792032551-project-member] Lock "fa4e9947-5b99-4447-9535-6dbcaba635f8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.356s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.297764] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82a1ca4-e172-4f49-b11b-904947035bff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.320181] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a79e42a-e8fb-48e5-a102-88a900f1af64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.332139] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 919.347406] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 919.347406] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5204594d-cd0b-bfe6-5d7a-d087b8d24397" [ 919.347406] env[62619]: _type = "HttpNfcLease" [ 919.347406] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 919.369092] env[62619]: INFO nova.compute.manager [-] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Took 1.31 seconds to deallocate network for instance. [ 919.441921] env[62619]: INFO nova.compute.manager [-] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Took 1.30 seconds to deallocate network for instance. 
[ 919.665801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.666107] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.666226] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.666423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 919.666649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 919.669018] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.669276] env[62619]: INFO nova.compute.manager [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Terminating instance [ 919.671521] env[62619]: DEBUG nova.compute.manager [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 919.671784] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 919.672748] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14111ed6-f136-456e-8299-a7ca95b221c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.686049] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 919.686547] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-6afbeab1-e89a-42e1-8a1d-5e389bfe6be4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.768091] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 919.768479] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 919.768810] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleting the datastore file [datastore1] 42aeba4e-5c87-46d5-9c7c-c6f263c69171 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 919.772233] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1d8ee617-3671-4166-9b6d-797304268c62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.779692] env[62619]: DEBUG oslo_vmware.api [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 919.779692] env[62619]: value = "task-1365128" [ 919.779692] env[62619]: _type = "Task" [ 919.779692] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.793847] env[62619]: DEBUG oslo_vmware.api [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365128, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.840487] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 919.840831] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-abe6637e-ced6-4fc2-b756-43b63a13e9f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.853815] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 919.853815] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5204594d-cd0b-bfe6-5d7a-d087b8d24397" [ 919.853815] env[62619]: _type = "HttpNfcLease" [ 919.853815] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 919.854430] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 919.854430] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5204594d-cd0b-bfe6-5d7a-d087b8d24397" [ 919.854430] env[62619]: _type = "HttpNfcLease" [ 919.854430] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 919.854777] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 919.854777] env[62619]: value = "task-1365129" [ 919.854777] env[62619]: _type = "Task" [ 919.854777] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 919.855492] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bc0651e-e09b-4710-b278-bd0347886ab1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.870110] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Found VMDK URL: https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d18824-de54-9bc3-445a-f167c776bf83/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 919.870482] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating HTTP connection to write to file with size = 21334016 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d18824-de54-9bc3-445a-f167c776bf83/disk-0.vmdk. 
{{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 919.878041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.878556] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365129, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 919.968718] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 919.975231] env[62619]: DEBUG nova.compute.manager [req-329ddf7d-a4eb-48af-958d-78fb0a2612a6 req-988c0a5c-1c80-4862-87c7-14a93fc1b673 service nova] [instance: 33998dc6-3be4-4b78-af12-0ad7bfab70c6] Received event network-vif-deleted-539b4aac-fc64-4dc2-a020-cf6440bd00d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 919.986821] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-ad92bfc5-860b-46af-adf4-552a72e48f06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 919.997342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46c4d5fd-b7d4-4673-8186-2c4a58576506 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.006981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-640de89b-90fe-4365-9acc-ae389da2a39e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.043618] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d93dfbae-04c6-4a54-8fab-83b46286c765 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.052902] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f5b6528-6b0c-4d3b-97f2-6b2c3481ea19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.067505] env[62619]: DEBUG nova.compute.provider_tree [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 920.165579] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.292884] env[62619]: DEBUG oslo_vmware.api [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365128, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.181217} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 920.295490] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 920.295838] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 920.296195] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 920.296533] env[62619]: INFO nova.compute.manager [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Took 0.62 seconds to destroy the instance on the hypervisor. [ 920.296913] env[62619]: DEBUG oslo.service.loopingcall [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 920.297174] env[62619]: DEBUG nova.compute.manager [-] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 920.297293] env[62619]: DEBUG nova.network.neutron [-] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 920.373156] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365129, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.596359] env[62619]: ERROR nova.scheduler.client.report [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [req-2aecc563-fd94-45df-a802-48b9ab82d75c] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2aecc563-fd94-45df-a802-48b9ab82d75c"}]} [ 920.614928] env[62619]: DEBUG nova.scheduler.client.report [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 920.633990] env[62619]: DEBUG nova.scheduler.client.report [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 920.634360] env[62619]: DEBUG nova.compute.provider_tree [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 920.645582] env[62619]: DEBUG nova.scheduler.client.report [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 920.668633] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} 
progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.672011] env[62619]: DEBUG nova.scheduler.client.report [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 920.715822] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 920.716129] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting lease state for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d18824-de54-9bc3-445a-f167c776bf83/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 920.717192] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd82e4c-d307-4061-8780-1706061c148f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.727161] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d18824-de54-9bc3-445a-f167c776bf83/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 920.727362] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lease for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d18824-de54-9bc3-445a-f167c776bf83/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 920.727607] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-ce524161-bd5a-467d-9062-f076f96fc74f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.830110] env[62619]: DEBUG nova.compute.manager [req-3cde0233-0403-440e-afca-4d8e62a9ba0c req-d35b8324-ba88-413d-ac2b-f7c518276f63 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Received event network-vif-deleted-9183d7c6-bf8f-4386-b4b0-aa8ed284959a {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 920.830110] env[62619]: INFO nova.compute.manager [req-3cde0233-0403-440e-afca-4d8e62a9ba0c req-d35b8324-ba88-413d-ac2b-f7c518276f63 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Neutron deleted interface 9183d7c6-bf8f-4386-b4b0-aa8ed284959a; detaching it from the instance and deleting it from the info cache [ 920.830110] env[62619]: DEBUG nova.network.neutron [req-3cde0233-0403-440e-afca-4d8e62a9ba0c req-d35b8324-ba88-413d-ac2b-f7c518276f63 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 920.870985] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365129, 'name': PowerOnVM_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 920.886980] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48837806-d708-49f5-b91a-1f08214ab5a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.895684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8973f13-ab45-472c-bdbe-766bec2e8996 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.902374] env[62619]: DEBUG oslo_vmware.rw_handles [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Closed VMDK write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap/nfc/52d18824-de54-9bc3-445a-f167c776bf83/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 920.902374] env[62619]: INFO nova.virt.vmwareapi.images [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Downloaded image file data 4be035b6-4033-4f5c-9c02-c3d1968a1982 [ 920.903129] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dacff08-47d5-4d27-a292-69ceb4603e05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.930825] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15ca0777-65aa-49e8-8840-1a7f40f2bd62 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.944503] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e4dad06-e9e8-40eb-ad4d-bda074eaa66a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.949691] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4fe65c7-1b4f-46cf-9d63-b8fe411890ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 920.963633] env[62619]: DEBUG nova.compute.provider_tree [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 920.983819] env[62619]: INFO nova.virt.vmwareapi.images [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] The imported VM was unregistered [ 920.986407] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 920.986754] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating directory with path [datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 920.987064] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dc7d25c1-8f2f-4323-b563-c747a8e7e9b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.000706] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c 
tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Created directory with path [datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 921.000929] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab/OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab.vmdk to [datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk. {{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 921.001246] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-44a3842f-443d-4534-a8a7-aaab50c1d233 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.012947] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 921.012947] env[62619]: value = "task-1365131" [ 921.012947] env[62619]: _type = "Task" [ 921.012947] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 921.021673] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.166025] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.305664] env[62619]: DEBUG nova.network.neutron [-] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 921.333576] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-99582b5e-f9b7-4874-b789-4478e03a9d9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.346689] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6550d72-ab62-40e2-8812-a2ff90a76298 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 921.371048] env[62619]: DEBUG oslo_vmware.api [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365129, 'name': PowerOnVM_Task, 'duration_secs': 1.5122} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 921.371048] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 921.371048] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-64fa8c3f-afad-47a9-a19a-84549b8a8f70 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance '11542a9b-6556-4b4b-88fe-26c6be2969f6' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 921.383250] env[62619]: DEBUG nova.compute.manager [req-3cde0233-0403-440e-afca-4d8e62a9ba0c req-d35b8324-ba88-413d-ac2b-f7c518276f63 service nova] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Detach interface failed, port_id=9183d7c6-bf8f-4386-b4b0-aa8ed284959a, reason: Instance 42aeba4e-5c87-46d5-9c7c-c6f263c69171 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 921.504681] env[62619]: DEBUG nova.scheduler.client.report [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 102 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 921.505290] env[62619]: DEBUG nova.compute.provider_tree [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 102 to 103 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 921.505487] env[62619]: DEBUG nova.compute.provider_tree [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 921.524030] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task} progress is 15%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.670151] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 921.808517] env[62619]: INFO nova.compute.manager [-] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Took 1.51 seconds to deallocate network for instance. [ 922.012066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.351s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 922.012640] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 922.017817] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.045s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 922.019052] env[62619]: INFO nova.compute.claims [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 922.034414] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task} progress is 35%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.168627] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.315749] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 922.518848] env[62619]: DEBUG nova.compute.utils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 922.520405] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 922.520583] env[62619]: DEBUG nova.network.neutron [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 922.542091] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task} progress is 54%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 922.640291] env[62619]: DEBUG nova.policy [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 922.677078] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.026468] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 923.056296] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task} progress is 74%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.182056] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.227607] env[62619]: DEBUG nova.network.neutron [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Successfully created port: 947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 923.305183] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-831c8c0b-3411-4718-b4da-ca19cb71a35d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.314768] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3309cd64-338f-4a9e-b985-1e489ee23307 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.352441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2489ac1b-9bad-4c56-9458-3ebcbcb19807 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.361842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa94ccff-4105-49e5-94af-f4829f610037 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 923.378026] env[62619]: DEBUG nova.compute.provider_tree [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 923.555387] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.675927] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task} progress is 18%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 923.755475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 923.755765] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 923.756095] env[62619]: DEBUG nova.compute.manager [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Going to confirm migration 1 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 923.881252] env[62619]: DEBUG nova.scheduler.client.report [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 924.050666] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 924.052706] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365131, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.848671} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.054527] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab/OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab.vmdk to [datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk. 
[ 924.054527] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Cleaning up location [datastore1] OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 924.054527] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_023062aa-e7e2-4155-a08f-39d73c52b9ab {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 924.054527] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e3810cd-b1df-4c0b-a2e7-0b0368f64dbf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.062461] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 924.062461] env[62619]: value = "task-1365132" [ 924.062461] env[62619]: _type = "Task" [ 924.062461] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.073638] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365132, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.087112] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 924.087373] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 924.087534] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 924.087718] env[62619]: DEBUG nova.virt.hardware [None 
req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 924.088215] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 924.088430] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 924.088700] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 924.088899] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 924.089099] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 924.089277] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 924.089458] env[62619]: DEBUG nova.virt.hardware [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 924.090346] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-270cdb87-7ef2-4a57-b150-b6cb6b280b5e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.099302] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd8d33d5-28b2-47f3-b218-42a119de66fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.174666] env[62619]: DEBUG oslo_vmware.api [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365124, 'name': ReconfigVM_Task, 'duration_secs': 6.228281} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.174927] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.175166] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Reconfigured VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 924.386395] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 924.386591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 924.386775] env[62619]: DEBUG nova.network.neutron [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 924.386965] env[62619]: DEBUG nova.objects.instance [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lazy-loading 'info_cache' on Instance uuid 11542a9b-6556-4b4b-88fe-26c6be2969f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.389046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.372s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 924.389565] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 924.392513] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.190s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 924.396106] env[62619]: DEBUG nova.objects.instance [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'resources' on Instance uuid 85e279da-e067-46f8-929b-87a013c4e7f4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 924.575619] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365132, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.053599} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 924.575879] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 924.576061] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 924.576317] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk to [datastore1] ef1e80cf-2ea2-4764-851a-8aa97563a278/ef1e80cf-2ea2-4764-851a-8aa97563a278.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 924.576580] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3ec18748-841c-4eaf-ac31-d2c341789bae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 924.584676] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 924.584676] env[62619]: value = "task-1365133" [ 924.584676] env[62619]: _type = "Task" [ 924.584676] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 924.593890] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 924.900312] env[62619]: DEBUG nova.compute.utils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 924.905468] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 924.905742] env[62619]: DEBUG nova.network.neutron [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 924.993804] env[62619]: DEBUG nova.policy [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9788d9778eb248149078c082538e4fa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6e92987102742d9b65b83850b6e5e7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 925.008102] env[62619]: DEBUG nova.compute.manager [req-e87da20c-aafe-489b-945e-6ea126548256 req-218cc695-a3ef-4db5-ba3a-6a5dc12dd05f service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Received event network-vif-plugged-947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 925.008338] env[62619]: DEBUG oslo_concurrency.lockutils [req-e87da20c-aafe-489b-945e-6ea126548256 req-218cc695-a3ef-4db5-ba3a-6a5dc12dd05f service nova] Acquiring lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.008585] env[62619]: DEBUG oslo_concurrency.lockutils [req-e87da20c-aafe-489b-945e-6ea126548256 req-218cc695-a3ef-4db5-ba3a-6a5dc12dd05f service nova] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.008775] env[62619]: DEBUG oslo_concurrency.lockutils [req-e87da20c-aafe-489b-945e-6ea126548256 req-218cc695-a3ef-4db5-ba3a-6a5dc12dd05f service nova] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 925.008935] env[62619]: DEBUG nova.compute.manager 
[req-e87da20c-aafe-489b-945e-6ea126548256 req-218cc695-a3ef-4db5-ba3a-6a5dc12dd05f service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] No waiting events found dispatching network-vif-plugged-947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 925.009119] env[62619]: WARNING nova.compute.manager [req-e87da20c-aafe-489b-945e-6ea126548256 req-218cc695-a3ef-4db5-ba3a-6a5dc12dd05f service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Received unexpected event network-vif-plugged-947d01f5-2fdd-4496-9e40-26ff1765031b for instance with vm_state building and task_state spawning. [ 925.097805] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.161548] env[62619]: DEBUG nova.network.neutron [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Successfully updated port: 947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 925.188985] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c5918a75-de48-457f-86ce-9103825fd1ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.191963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 925.192220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 925.199053] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39e259e9-2621-4a72-93e9-dbfb8e65a303 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.234422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13a43991-0057-4e17-ac89-e3b90bd52430 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.242470] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25ac911b-f024-4372-9242-cd288db711a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 925.256647] env[62619]: DEBUG nova.compute.provider_tree [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 
tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 925.335216] env[62619]: DEBUG nova.network.neutron [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Successfully created port: 53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 925.410515] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 925.563981] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.564283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.564530] env[62619]: DEBUG nova.network.neutron [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 925.601782] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 15%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 925.669023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 925.669023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 925.669023] env[62619]: DEBUG nova.network.neutron [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 925.695128] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 925.725486] env[62619]: DEBUG nova.network.neutron [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [{"id": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "address": "fa:16:3e:d5:c3:bc", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa0389ef1-cf", "ovs_interfaceid": "a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 925.759577] env[62619]: DEBUG nova.scheduler.client.report [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': 
{'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 926.100340] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 35%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.170115] env[62619]: DEBUG nova.compute.manager [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 926.170389] env[62619]: DEBUG nova.compute.manager [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing instance network info cache due to event network-changed-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 926.170389] env[62619]: DEBUG oslo_concurrency.lockutils [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] Acquiring lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 926.205164] env[62619]: DEBUG nova.network.neutron [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 926.224190] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.229853] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-11542a9b-6556-4b4b-88fe-26c6be2969f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.230140] env[62619]: DEBUG nova.objects.instance [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lazy-loading 'migration_context' on Instance uuid 11542a9b-6556-4b4b-88fe-26c6be2969f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 926.265010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.872s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.268795] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 7.469s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.270946] env[62619]: INFO nova.compute.claims [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 926.298907] env[62619]: INFO nova.scheduler.client.report [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted allocations for instance 85e279da-e067-46f8-929b-87a013c4e7f4 [ 926.310022] env[62619]: INFO nova.network.neutron [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Port 9413dc32-2f0c-4650-952a-63bed028a099 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
[ 926.310022] env[62619]: DEBUG nova.network.neutron [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.369984] env[62619]: DEBUG nova.network.neutron [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Updating instance_info_cache with network_info: [{"id": "947d01f5-2fdd-4496-9e40-26ff1765031b", "address": "fa:16:3e:3c:3f:5c", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947d01f5-2f", "ovs_interfaceid": "947d01f5-2fdd-4496-9e40-26ff1765031b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 926.422241] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 926.452119] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 926.452464] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 926.452630] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 926.452818] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 926.452967] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 926.453132] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 926.453411] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 926.453586] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 926.453761] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 926.454413] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 926.454413] env[62619]: DEBUG nova.virt.hardware [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 926.455078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe04c63d-f1ad-47fe-b5bc-ddbc77323cee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.463373] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-549dd010-aa62-40c9-a01d-fa6b09d69f27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.599138] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 54%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.733196] env[62619]: DEBUG nova.objects.base [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Object Instance<11542a9b-6556-4b4b-88fe-26c6be2969f6> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 926.734213] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ee0c8ae-2bf7-4458-a39c-58bb00dfe073 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.754476] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36b68669-a23e-4c11-9f42-795e5e71e269 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.760321] env[62619]: DEBUG oslo_vmware.api [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 926.760321] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52e238da-62d1-5da4-f1d6-b9b4b2717f8a" [ 926.760321] env[62619]: _type = "Task" [ 926.760321] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.768453] env[62619]: DEBUG oslo_vmware.api [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52e238da-62d1-5da4-f1d6-b9b4b2717f8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.807423] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4fa6cc8a-14b4-4523-a9e7-37e09b07a7b8 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "85e279da-e067-46f8-929b-87a013c4e7f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.708s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 926.810642] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.812934] env[62619]: DEBUG oslo_concurrency.lockutils [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] Acquired lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 926.813155] env[62619]: DEBUG nova.network.neutron [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Refreshing network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 926.872959] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 926.873303] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Instance network_info: |[{"id": "947d01f5-2fdd-4496-9e40-26ff1765031b", "address": "fa:16:3e:3c:3f:5c", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": 
"nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947d01f5-2f", "ovs_interfaceid": "947d01f5-2fdd-4496-9e40-26ff1765031b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 926.873754] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3c:3f:5c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '947d01f5-2fdd-4496-9e40-26ff1765031b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 926.881429] env[62619]: DEBUG oslo.service.loopingcall [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 926.881729] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 926.881986] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4cf04600-3a98-4bdd-9bdb-4c53fb2481ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 926.903727] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 926.903727] env[62619]: value = "task-1365134" [ 926.903727] env[62619]: _type = "Task" [ 926.903727] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 926.911906] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365134, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 926.992424] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "interface-8c07697f-0e20-4ec5-88ec-ec4420906313-9413dc32-2f0c-4650-952a-63bed028a099" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 926.992778] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-8c07697f-0e20-4ec5-88ec-ec4420906313-9413dc32-2f0c-4650-952a-63bed028a099" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 926.993325] env[62619]: DEBUG nova.objects.instance [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lazy-loading 'flavor' on Instance uuid 8c07697f-0e20-4ec5-88ec-ec4420906313 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.102095] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 74%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.134624] env[62619]: DEBUG nova.network.neutron [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Successfully updated port: 53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 927.176041] env[62619]: DEBUG nova.compute.manager [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Received event network-changed-947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 927.176422] env[62619]: DEBUG nova.compute.manager [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Refreshing instance network info cache due to event network-changed-947d01f5-2fdd-4496-9e40-26ff1765031b. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 927.176611] env[62619]: DEBUG oslo_concurrency.lockutils [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] Acquiring lock "refresh_cache-c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.176704] env[62619]: DEBUG oslo_concurrency.lockutils [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] Acquired lock "refresh_cache-c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.176941] env[62619]: DEBUG nova.network.neutron [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Refreshing network info cache for port 947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 927.276635] env[62619]: DEBUG oslo_vmware.api [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52e238da-62d1-5da4-f1d6-b9b4b2717f8a, 'name': SearchDatastore_Task, 'duration_secs': 0.044284} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.277149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 927.316078] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb815717-431b-4855-b0e7-2aa9cfc16f53 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-646b4ae6-09e1-4b3c-b17d-392e746df454-9413dc32-2f0c-4650-952a-63bed028a099" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 10.343s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 927.418359] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365134, 'name': CreateVM_Task, 'duration_secs': 0.41234} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.418359] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 927.418779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.419647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.419753] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 927.426216] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ace6ddef-47d5-41a8-8a00-cb0e3bd81dbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.436302] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 927.436302] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5225c93b-d9d0-8995-d916-e062ab725c86" [ 927.436302] env[62619]: _type = "Task" [ 927.436302] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.446508] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5225c93b-d9d0-8995-d916-e062ab725c86, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.558153] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f053cd82-7165-47f2-a2b1-1efe1babe133 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.567513] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74c1031b-7694-4276-b42d-95e4f6d9de6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.611218] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c00246fb-03fb-42cd-aa9c-bba6600eb7b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.622659] env[62619]: DEBUG nova.network.neutron [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updated VIF entry in instance network info cache for port 7b6beb91-6e5d-49a9-8465-ec68d37a4bbf. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 927.623178] env[62619]: DEBUG nova.network.neutron [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [{"id": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "address": "fa:16:3e:a9:43:0d", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7b6beb91-6e", "ovs_interfaceid": "7b6beb91-6e5d-49a9-8465-ec68d37a4bbf", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.629071] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a58f2f57-e47f-4c90-98e5-328b4faaee9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.634491] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task} progress is 94%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 927.647093] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.647184] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.647289] env[62619]: DEBUG nova.network.neutron [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 927.648692] env[62619]: DEBUG nova.compute.provider_tree [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 927.660317] env[62619]: DEBUG nova.objects.instance [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lazy-loading 'pci_requests' on Instance uuid 8c07697f-0e20-4ec5-88ec-ec4420906313 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 927.885608] env[62619]: DEBUG nova.network.neutron [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Updated VIF entry in instance network info cache for port 947d01f5-2fdd-4496-9e40-26ff1765031b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 927.885994] env[62619]: DEBUG nova.network.neutron [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Updating instance_info_cache with network_info: [{"id": "947d01f5-2fdd-4496-9e40-26ff1765031b", "address": "fa:16:3e:3c:3f:5c", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap947d01f5-2f", "ovs_interfaceid": "947d01f5-2fdd-4496-9e40-26ff1765031b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 927.947887] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5225c93b-d9d0-8995-d916-e062ab725c86, 'name': SearchDatastore_Task, 'duration_secs': 0.020962} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 927.948278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 927.954564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 927.954564] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 927.954564] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 927.954564] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 927.954564] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-0d963238-7261-4cbc-a574-a80cfa1b2444 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.966514] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 927.966722] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 927.967501] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d5e92cfd-a6b5-4318-ab13-ea0809445430 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 927.973139] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 927.973139] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52397c24-81df-a0aa-d9fa-d5ed54d31545" [ 927.973139] env[62619]: _type = "Task" [ 927.973139] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 927.980893] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52397c24-81df-a0aa-d9fa-d5ed54d31545, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.112338] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365133, 'name': CopyVirtualDisk_Task, 'duration_secs': 3.301512} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.112752] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/4be035b6-4033-4f5c-9c02-c3d1968a1982/4be035b6-4033-4f5c-9c02-c3d1968a1982.vmdk to [datastore1] ef1e80cf-2ea2-4764-851a-8aa97563a278/ef1e80cf-2ea2-4764-851a-8aa97563a278.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 928.113412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c28970a5-87fd-4561-9e2e-5217517d14f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.136066] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Reconfiguring VM instance instance-00000054 to attach disk [datastore1] ef1e80cf-2ea2-4764-851a-8aa97563a278/ef1e80cf-2ea2-4764-851a-8aa97563a278.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 928.136066] env[62619]: DEBUG oslo_concurrency.lockutils [req-0dcec58a-ffb9-4b30-b1de-019db8e52d5e req-7e931371-9da4-49f0-8c6e-7677c22029e2 service nova] Releasing lock "refresh_cache-646b4ae6-09e1-4b3c-b17d-392e746df454" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.136259] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a632cbd7-f3d0-432e-9558-08bd6e7a0f0c {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.153125] env[62619]: DEBUG nova.scheduler.client.report [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 928.161451] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 928.161451] env[62619]: value = "task-1365135" [ 928.161451] env[62619]: _type = "Task" [ 928.161451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.165034] env[62619]: DEBUG nova.objects.base [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Object Instance<8c07697f-0e20-4ec5-88ec-ec4420906313> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 928.165034] env[62619]: DEBUG nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 928.172972] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365135, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.203052] env[62619]: DEBUG nova.network.neutron [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 928.260858] env[62619]: DEBUG nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 928.261080] env[62619]: DEBUG nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing instance network info cache due to event network-changed-d70279ce-58c0-45c4-9a74-8f1f74552d21. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 928.261316] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 928.261471] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 928.261673] env[62619]: DEBUG nova.network.neutron [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 928.281779] env[62619]: DEBUG nova.policy [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b3041343376d4f2fad14577d5c412b1a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4270942193cd4a9aa397784368b9ae64', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 928.389053] env[62619]: DEBUG oslo_concurrency.lockutils [req-5b9067fa-c4ee-4034-a1f6-9c2db3164fd7 req-69d4ec25-4992-4cc6-87c6-3c9f90c06823 service nova] Releasing lock "refresh_cache-c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 928.485306] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52397c24-81df-a0aa-d9fa-d5ed54d31545, 'name': SearchDatastore_Task, 'duration_secs': 0.082122} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.486103] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ec9bde2f-ded5-400a-b827-9ad21229e091 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.491380] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 928.491380] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52457020-bec5-6409-3508-f580ee2ea583" [ 928.491380] env[62619]: _type = "Task" [ 928.491380] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.499308] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52457020-bec5-6409-3508-f580ee2ea583, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.606029] env[62619]: DEBUG nova.network.neutron [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [{"id": "53678dcd-6f40-4645-961d-a9c2608eeba8", "address": "fa:16:3e:bb:46:22", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53678dcd-6f", "ovs_interfaceid": "53678dcd-6f40-4645-961d-a9c2608eeba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 928.657785] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.390s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 928.658072] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 928.660625] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 8.783s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 928.660847] env[62619]: DEBUG nova.objects.instance [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lazy-loading 'resources' on Instance uuid ca452ef6-d777-46dd-a313-ae7dd441adca {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 928.670589] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365135, 'name': ReconfigVM_Task, 'duration_secs': 0.290084} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 928.670844] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Reconfigured VM instance instance-00000054 to attach disk [datastore1] ef1e80cf-2ea2-4764-851a-8aa97563a278/ef1e80cf-2ea2-4764-851a-8aa97563a278.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 928.671506] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fd5e9c4f-f2ea-4fa3-be5d-94dafb1fbfa6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 928.679581] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 928.679581] env[62619]: value = "task-1365136" [ 928.679581] env[62619]: _type = "Task" [ 928.679581] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 928.686578] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365136, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 928.977440] env[62619]: DEBUG nova.network.neutron [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updated VIF entry in instance network info cache for port d70279ce-58c0-45c4-9a74-8f1f74552d21. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 928.977923] env[62619]: DEBUG nova.network.neutron [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 929.002043] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52457020-bec5-6409-3508-f580ee2ea583, 'name': SearchDatastore_Task, 'duration_secs': 0.094672} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.002261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.002505] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] c4d0e4fc-9ce2-4ebc-8ede-337f843ec855/c4d0e4fc-9ce2-4ebc-8ede-337f843ec855.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 929.002782] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f69d98a3-31b8-4097-99b1-b5369d83bae1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.009215] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 929.009215] env[62619]: value = "task-1365137" [ 929.009215] env[62619]: _type = "Task" [ 929.009215] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.016986] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365137, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.108577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.108917] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Instance network_info: |[{"id": "53678dcd-6f40-4645-961d-a9c2608eeba8", "address": "fa:16:3e:bb:46:22", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53678dcd-6f", "ovs_interfaceid": "53678dcd-6f40-4645-961d-a9c2608eeba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 929.109386] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:bb:46:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '53678dcd-6f40-4645-961d-a9c2608eeba8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 929.118097] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating folder: Project (b6e92987102742d9b65b83850b6e5e7e). Parent ref: group-v290436. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.118432] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f19c8f38-df63-4a09-bf64-96dfa104bd3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.129071] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created folder: Project (b6e92987102742d9b65b83850b6e5e7e) in parent group-v290436. [ 929.130024] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating folder: Instances. Parent ref: group-v290519. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 929.130024] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7d3caf40-6d88-4bdc-a262-0bf32d53fb7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.139311] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created folder: Instances in parent group-v290519. [ 929.139612] env[62619]: DEBUG oslo.service.loopingcall [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 929.139851] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 929.140084] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-914a16e1-792f-45aa-9335-50b49e904ad3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.159831] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 929.159831] env[62619]: value = "task-1365140" [ 929.159831] env[62619]: _type = "Task" [ 929.159831] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.164171] env[62619]: DEBUG nova.compute.utils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 929.165866] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 929.166058] env[62619]: DEBUG nova.network.neutron [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 929.175243] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365140, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.190458] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365136, 'name': Rename_Task, 'duration_secs': 0.150265} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.193474] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 929.194279] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6f02b369-b3b9-4660-a8ab-e022548700df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.201296] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 929.201296] env[62619]: value = "task-1365141" [ 929.201296] env[62619]: _type = "Task" [ 929.201296] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 929.212303] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365141, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.213642] env[62619]: DEBUG nova.policy [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9788d9778eb248149078c082538e4fa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6e92987102742d9b65b83850b6e5e7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 929.420317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0fbcd4-b904-463c-beae-1e320ee14ff7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.429400] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e18f745f-b939-47ba-ab3f-ac7076bba06a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.467591] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65f41d63-1fde-4237-8339-ed3a0f823e4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.477299] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bfa8cf6-285b-4433-a1bc-376f52240a1b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 929.482726] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 929.482726] env[62619]: DEBUG nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Received event network-vif-plugged-53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.482726] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Acquiring lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 929.482922] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 929.483108] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 
req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 929.483285] env[62619]: DEBUG nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] No waiting events found dispatching network-vif-plugged-53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 929.483457] env[62619]: WARNING nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Received unexpected event network-vif-plugged-53678dcd-6f40-4645-961d-a9c2608eeba8 for instance with vm_state building and task_state spawning. [ 929.484060] env[62619]: DEBUG nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Received event network-changed-53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 929.484060] env[62619]: DEBUG nova.compute.manager [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Refreshing instance network info cache due to event network-changed-53678dcd-6f40-4645-961d-a9c2608eeba8. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 929.484060] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Acquiring lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 929.484228] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Acquired lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 929.484310] env[62619]: DEBUG nova.network.neutron [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Refreshing network info cache for port 53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 929.499013] env[62619]: DEBUG nova.compute.provider_tree [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 929.501380] env[62619]: DEBUG nova.network.neutron [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Successfully created port: b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 929.520876] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365137, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.671846] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 929.674452] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365140, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 929.713028] env[62619]: DEBUG oslo_vmware.api [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365141, 'name': PowerOnVM_Task, 'duration_secs': 0.50299} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 929.713028] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 929.713028] env[62619]: INFO nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Took 16.02 seconds to spawn the instance on the hypervisor. 
[ 929.713028] env[62619]: DEBUG nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 929.713367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38cc96ad-9de3-4502-a506-4d06771e1717 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.005163] env[62619]: DEBUG nova.scheduler.client.report [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 930.018362] env[62619]: DEBUG nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Successfully updated port: 9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 930.027234] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365137, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.584715} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.027500] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] c4d0e4fc-9ce2-4ebc-8ede-337f843ec855/c4d0e4fc-9ce2-4ebc-8ede-337f843ec855.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 930.027731] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 930.027969] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8e87fa50-fd13-418a-88f7-b903fc61ca98 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.035513] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 930.035513] env[62619]: value = "task-1365142" [ 930.035513] env[62619]: _type = "Task" [ 930.035513] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.044012] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365142, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.172493] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365140, 'name': CreateVM_Task, 'duration_secs': 0.535854} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.174743] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 930.179630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.179630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.179630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 930.179933] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-71b97981-f21c-4ede-8682-0eaa32bf9247 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.184834] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 930.184834] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c0f6f6-9082-1220-2208-7b467260e61d" [ 930.184834] env[62619]: _type = "Task" [ 930.184834] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.193291] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c0f6f6-9082-1220-2208-7b467260e61d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.231022] env[62619]: INFO nova.compute.manager [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Took 28.22 seconds to build instance. [ 930.241772] env[62619]: DEBUG nova.network.neutron [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updated VIF entry in instance network info cache for port 53678dcd-6f40-4645-961d-a9c2608eeba8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 930.242264] env[62619]: DEBUG nova.network.neutron [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [{"id": "53678dcd-6f40-4645-961d-a9c2608eeba8", "address": "fa:16:3e:bb:46:22", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53678dcd-6f", "ovs_interfaceid": "53678dcd-6f40-4645-961d-a9c2608eeba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 930.324588] env[62619]: DEBUG nova.compute.manager [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-vif-plugged-9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.324806] env[62619]: DEBUG oslo_concurrency.lockutils [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 930.324990] env[62619]: DEBUG oslo_concurrency.lockutils [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.325181] env[62619]: DEBUG oslo_concurrency.lockutils [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.325353] env[62619]: DEBUG nova.compute.manager [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] No waiting events found dispatching network-vif-plugged-9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 
930.325522] env[62619]: WARNING nova.compute.manager [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received unexpected event network-vif-plugged-9413dc32-2f0c-4650-952a-63bed028a099 for instance with vm_state active and task_state None. [ 930.325682] env[62619]: DEBUG nova.compute.manager [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-changed-9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 930.325836] env[62619]: DEBUG nova.compute.manager [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing instance network info cache due to event network-changed-9413dc32-2f0c-4650-952a-63bed028a099. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 930.326030] env[62619]: DEBUG oslo_concurrency.lockutils [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.326171] env[62619]: DEBUG oslo_concurrency.lockutils [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.326423] env[62619]: DEBUG nova.network.neutron [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Refreshing network info cache for port 9413dc32-2f0c-4650-952a-63bed028a099 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 930.513026] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.852s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.515790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.547s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 930.516080] env[62619]: DEBUG nova.objects.instance [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lazy-loading 'resources' on Instance uuid 33998dc6-3be4-4b78-af12-0ad7bfab70c6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 930.520845] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] 
Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.540738] env[62619]: INFO nova.scheduler.client.report [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Deleted allocations for instance ca452ef6-d777-46dd-a313-ae7dd441adca [ 930.549227] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365142, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.171792} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.550027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 930.550457] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09b83e28-4ccf-43db-985f-7fdb0efa2a04 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.572622] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Reconfiguring VM instance instance-00000056 to attach disk [datastore2] c4d0e4fc-9ce2-4ebc-8ede-337f843ec855/c4d0e4fc-9ce2-4ebc-8ede-337f843ec855.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 930.572976] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1f22a2ab-b832-4628-ab69-a616dee33509 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.594301] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 930.594301] env[62619]: value = "task-1365143" [ 930.594301] env[62619]: _type = "Task" [ 930.594301] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.603747] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365143, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.681602] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 930.696153] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c0f6f6-9082-1220-2208-7b467260e61d, 'name': SearchDatastore_Task, 'duration_secs': 0.053798} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 930.696550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 930.696863] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 930.697178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 930.697394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 930.697634] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 930.698015] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-db4617a7-35ea-4c4b-8b8c-1ac6c95a423c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.708199] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 930.708444] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 930.708606] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 930.708792] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 930.708944] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 930.709107] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 930.709317] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 930.709481] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 930.709651] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 930.709819] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 930.709995] env[62619]: DEBUG nova.virt.hardware [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 930.710814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-26b6fbab-3757-45c3-860e-feea0c5cbd39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.714400] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 930.714400] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Folder [datastore2] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 930.715401] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a6cc5467-d067-43ed-b940-cb0b2798fa09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.722740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a311549-3459-405f-ac45-d6d457343e67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 930.728105] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 930.728105] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5229b9d0-0132-53f8-9b09-a2ffb76252d8" [ 930.728105] env[62619]: _type = "Task" [ 930.728105] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 930.737819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a9b9a712-5d04-414a-a9af-1cdd812e940c tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 29.740s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 930.742542] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5229b9d0-0132-53f8-9b09-a2ffb76252d8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 930.745094] env[62619]: DEBUG oslo_concurrency.lockutils [req-2d3bf88b-662b-4437-8a36-49bb94b95ec9 req-5ae9d978-a43d-4ff3-9803-c0743e88ed85 service nova] Releasing lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.007132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "ef1e80cf-2ea2-4764-851a-8aa97563a278" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.007419] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.007633] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "ef1e80cf-2ea2-4764-851a-8aa97563a278-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 931.007821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 931.007992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.010931] env[62619]: INFO nova.compute.manager [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Terminating instance [ 931.012837] env[62619]: DEBUG nova.compute.manager [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 931.013081] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 931.013877] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-844e5c3a-75a8-4a87-9006-a63f6a91e563 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.025514] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 931.026437] env[62619]: DEBUG nova.network.neutron [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Successfully updated port: b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 931.027584] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-32608ee8-4459-4c22-b605-4cd725896f71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.033802] env[62619]: DEBUG oslo_vmware.api [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 931.033802] env[62619]: value = "task-1365144" [ 931.033802] env[62619]: _type = "Task" [ 931.033802] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.049613] env[62619]: DEBUG oslo_vmware.api [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365144, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.050117] env[62619]: DEBUG oslo_concurrency.lockutils [None req-36929ccd-29d8-4380-800b-eeacf47535d3 tempest-ListServerFiltersTestJSON-502742612 tempest-ListServerFiltersTestJSON-502742612-project-member] Lock "ca452ef6-d777-46dd-a313-ae7dd441adca" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 14.184s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 931.085760] env[62619]: DEBUG nova.network.neutron [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Added VIF to instance network info cache for port 9413dc32-2f0c-4650-952a-63bed028a099. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3489}} [ 931.086220] env[62619]: DEBUG nova.network.neutron [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9413dc32-2f0c-4650-952a-63bed028a099", "address": "fa:16:3e:5b:0a:fa", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9413dc32-2f", "ovs_interfaceid": "9413dc32-2f0c-4650-952a-63bed028a099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 931.104159] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365143, 'name': ReconfigVM_Task, 'duration_secs': 0.272813} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.107231] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Reconfigured VM instance instance-00000056 to attach disk [datastore2] c4d0e4fc-9ce2-4ebc-8ede-337f843ec855/c4d0e4fc-9ce2-4ebc-8ede-337f843ec855.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 931.108301] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-d716cc64-0623-42c5-a212-ceefcc295acf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.114311] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 931.114311] env[62619]: value = "task-1365145" [ 931.114311] env[62619]: _type = "Task" [ 931.114311] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.122644] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365145, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.224166] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e99fe87a-c8f7-4210-a140-23e235d1c53a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.234511] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec3615c1-a2a9-4d98-a495-34a080031add {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.241769] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5229b9d0-0132-53f8-9b09-a2ffb76252d8, 'name': SearchDatastore_Task, 'duration_secs': 0.019747} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.245018] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-78cc6610-3354-4016-85c8-d9a00852dc05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.272045] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45c5c625-b25c-43bb-8032-7b5fbce93bd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.276135] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 931.276135] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52468b72-662a-bd8e-1a90-ab701b5d6b6c" [ 931.276135] env[62619]: _type = "Task" [ 931.276135] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.283797] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ac91e46-67ca-4a1b-a3eb-03f2c2f27d71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.290999] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52468b72-662a-bd8e-1a90-ab701b5d6b6c, 'name': SearchDatastore_Task, 'duration_secs': 0.010199} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.291756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.292066] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/02dec4f2-cbe7-4bb0-a57e-3970c5669354.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 931.292329] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-98189f37-6554-42d0-aad2-69f3f86e88ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.302021] env[62619]: DEBUG nova.compute.provider_tree [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 931.307226] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 931.307226] env[62619]: value = "task-1365146" [ 931.307226] env[62619]: _type = "Task" [ 931.307226] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.315183] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365146, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.530485] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 931.530681] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.530841] env[62619]: DEBUG nova.network.neutron [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 931.548166] env[62619]: DEBUG oslo_vmware.api [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365144, 'name': PowerOffVM_Task, 'duration_secs': 0.167202} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.548166] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 931.548166] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 931.548964] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72dab869-d317-4cea-a56c-c80bd901af80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.589535] env[62619]: DEBUG oslo_concurrency.lockutils [req-7f2a8425-0c25-481c-9cdc-e1eb7396aa6c req-eb573b8b-a7fe-463f-b71a-40d92f109345 service nova] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 931.589982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 931.590246] env[62619]: DEBUG nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Building 
network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 931.628494] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365145, 'name': Rename_Task, 'duration_secs': 0.174829} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 931.630119] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 931.630492] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 931.630732] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 931.630950] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleting the datastore file [datastore1] ef1e80cf-2ea2-4764-851a-8aa97563a278 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 931.632681] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-6ae55013-d0a6-48f8-8a3c-0bd2f4d9f9e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.634700] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a97e3bd4-f08a-4e8d-b01a-0532ff08a85e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 931.642610] env[62619]: DEBUG oslo_vmware.api [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 931.642610] env[62619]: value = "task-1365148" [ 931.642610] env[62619]: _type = "Task" [ 931.642610] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.644177] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 931.644177] env[62619]: value = "task-1365149" [ 931.644177] env[62619]: _type = "Task" [ 931.644177] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 931.656456] env[62619]: DEBUG oslo_vmware.api [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365148, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.660230] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365149, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 931.805747] env[62619]: DEBUG nova.scheduler.client.report [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 931.823995] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365146, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.062172] env[62619]: DEBUG nova.network.neutron [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 932.127567] env[62619]: WARNING nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] f7c32aa4-863f-481c-899f-debfaa1844da already exists in list: networks containing: ['f7c32aa4-863f-481c-899f-debfaa1844da']. ignoring it [ 932.127791] env[62619]: WARNING nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] f7c32aa4-863f-481c-899f-debfaa1844da already exists in list: networks containing: ['f7c32aa4-863f-481c-899f-debfaa1844da']. ignoring it [ 932.127962] env[62619]: WARNING nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] 9413dc32-2f0c-4650-952a-63bed028a099 already exists in list: port_ids containing: ['9413dc32-2f0c-4650-952a-63bed028a099']. 
ignoring it [ 932.159482] env[62619]: DEBUG oslo_vmware.api [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365148, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.256492} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.159808] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 932.160961] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 932.161230] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 932.161428] env[62619]: INFO nova.compute.manager [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Took 1.15 seconds to destroy the instance on the hypervisor. [ 932.161691] env[62619]: DEBUG oslo.service.loopingcall [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 932.162366] env[62619]: DEBUG nova.compute.manager [-] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 932.162474] env[62619]: DEBUG nova.network.neutron [-] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 932.173028] env[62619]: DEBUG oslo_vmware.api [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365149, 'name': PowerOnVM_Task, 'duration_secs': 0.52269} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.173028] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 932.173028] env[62619]: INFO nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Took 8.12 seconds to spawn the instance on the hypervisor. 
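
The records above trace one spawn sequence end to end: CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task, Rename_Task and PowerOnVM_Task are each submitted to vCenter and then driven by the same wait/poll loop that produces the "Waiting for the task", "progress is N%" and "completed successfully" lines. As a rough illustration of that pattern (not oslo.vmware's actual `wait_for_task`/`_poll_task` code), the sketch below polls a task with `oslo_service.loopingcall`; `get_task_info` is a hypothetical helper standing in for the PropertyCollector query done on each poll.

```python
# Illustrative sketch only -- not oslo.vmware's implementation.
# `get_task_info(task_ref)` is a hypothetical callable that returns an object
# with .state, .progress, .result and .error, the way a vCenter TaskInfo does.
from oslo_service import loopingcall


def wait_for_vmware_task(task_ref, get_task_info, poll_interval=0.5):
    """Poll a vCenter task until it finishes, mirroring the
    'Task: {...} progress is N%' / 'completed successfully' records above."""

    def _poll():
        info = get_task_info(task_ref)
        if info.state == 'running':
            # Still in flight; this is where the "progress is N%" lines come from.
            return
        if info.state == 'success':
            # Stop the loop and hand the task result back to the caller.
            raise loopingcall.LoopingCallDone(retvalue=info.result)
        # Any other state is treated as a failure and propagated.
        raise RuntimeError("Task %s failed: %s" % (task_ref, info.error))

    timer = loopingcall.FixedIntervalLoopingCall(_poll)
    return timer.start(interval=poll_interval).wait()
```

In the log, each step of the spawn (copy, extend, reconfigure, rename, power on) runs through a loop of this shape before the next step is started.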
[ 932.173028] env[62619]: DEBUG nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 932.173028] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da3128a5-761b-4205-807d-4fbe2fbe33f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.320986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.805s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.325223] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365146, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.533523} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.325961] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 10.010s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.326320] env[62619]: DEBUG nova.objects.instance [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'resources' on Instance uuid 42aeba4e-5c87-46d5-9c7c-c6f263c69171 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 932.327503] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/02dec4f2-cbe7-4bb0-a57e-3970c5669354.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 932.330744] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 932.330744] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-109a3eb2-626c-4baa-ab38-d8290fa2cd5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.340548] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 
tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 932.340548] env[62619]: value = "task-1365150" [ 932.340548] env[62619]: _type = "Task" [ 932.340548] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.350457] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365150, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.355387] env[62619]: INFO nova.scheduler.client.report [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Deleted allocations for instance 33998dc6-3be4-4b78-af12-0ad7bfab70c6 [ 932.492462] env[62619]: DEBUG nova.network.neutron [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Updating instance_info_cache with network_info: [{"id": "b144833e-a8d9-4967-bb72-78e71720d55e", "address": "fa:16:3e:20:f0:33", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb144833e-a8", "ovs_interfaceid": "b144833e-a8d9-4967-bb72-78e71720d55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.593186] env[62619]: DEBUG nova.compute.manager [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Received event network-vif-plugged-b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.593426] env[62619]: DEBUG oslo_concurrency.lockutils [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] Acquiring lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 932.593648] env[62619]: DEBUG oslo_concurrency.lockutils [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] Lock 
"495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 932.593827] env[62619]: DEBUG oslo_concurrency.lockutils [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.594011] env[62619]: DEBUG nova.compute.manager [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] No waiting events found dispatching network-vif-plugged-b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 932.594206] env[62619]: WARNING nova.compute.manager [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Received unexpected event network-vif-plugged-b144833e-a8d9-4967-bb72-78e71720d55e for instance with vm_state building and task_state spawning. [ 932.594365] env[62619]: DEBUG nova.compute.manager [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Received event network-changed-b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 932.594518] env[62619]: DEBUG nova.compute.manager [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Refreshing instance network info cache due to event network-changed-b144833e-a8d9-4967-bb72-78e71720d55e. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 932.594684] env[62619]: DEBUG oslo_concurrency.lockutils [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] Acquiring lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 932.657110] env[62619]: DEBUG nova.network.neutron [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "9413dc32-2f0c-4650-952a-63bed028a099", "address": "fa:16:3e:5b:0a:fa", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9413dc32-2f", "ovs_interfaceid": "9413dc32-2f0c-4650-952a-63bed028a099", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": true, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 932.693804] env[62619]: INFO nova.compute.manager [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Took 16.50 seconds to build instance. 
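
Many of the surrounding records are oslo.concurrency lock bookkeeping: "Acquiring lock", "acquired ... waited Xs" and "released ... held Ys" around names such as "refresh_cache-<instance uuid>" and "compute_resources". The sketch below shows the same locking style with `oslo_concurrency.lockutils`; the function and cache names are hypothetical and only illustrate the pattern, they are not Nova's actual code.

```python
# Minimal sketch of the lock usage visible in the records above.
from oslo_concurrency import lockutils


@lockutils.synchronized('compute_resources')
def update_usage(tracker, instance):
    # Serialized with every other resource-tracker update; contention here is
    # what produces the "waited 10.010s" style entries in the log.
    tracker.apply(instance)


def refresh_network_cache(instance_uuid, fetch_nw_info, cache):
    # Same naming scheme as the "refresh_cache-<uuid>" locks in the log: only
    # one request at a time may rebuild a given instance's network-info cache.
    with lockutils.lock('refresh_cache-%s' % instance_uuid):
        cache[instance_uuid] = fetch_nw_info(instance_uuid)
        return cache[instance_uuid]
```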
[ 932.852589] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365150, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.118619} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 932.852943] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 932.854163] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4f01e7b-e6b0-44db-a75e-c14f93f128ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.880908] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/02dec4f2-cbe7-4bb0-a57e-3970c5669354.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 932.881537] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77b1ec33-fa1f-4c51-9c94-607f4d5df1bd tempest-ServerRescueTestJSONUnderV235-2045674175 tempest-ServerRescueTestJSONUnderV235-2045674175-project-member] Lock "33998dc6-3be4-4b78-af12-0ad7bfab70c6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.986s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 932.886543] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c9318e80-d6b9-4a01-a028-ade6fd4e60ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 932.909185] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 932.909185] env[62619]: value = "task-1365151" [ 932.909185] env[62619]: _type = "Task" [ 932.909185] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 932.921505] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365151, 'name': ReconfigVM_Task} progress is 10%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 932.995496] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 932.995685] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Instance network_info: |[{"id": "b144833e-a8d9-4967-bb72-78e71720d55e", "address": "fa:16:3e:20:f0:33", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb144833e-a8", "ovs_interfaceid": "b144833e-a8d9-4967-bb72-78e71720d55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 932.995858] env[62619]: DEBUG oslo_concurrency.lockutils [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] Acquired lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 932.996057] env[62619]: DEBUG nova.network.neutron [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Refreshing network info cache for port b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 932.997350] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:20:f0:33', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b144833e-a8d9-4967-bb72-78e71720d55e', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 933.012424] env[62619]: DEBUG oslo.service.loopingcall [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 
tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 933.019669] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 933.020513] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-691a7ec5-0460-4967-9823-c0b62be2a550 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.055231] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 933.055231] env[62619]: value = "task-1365152" [ 933.055231] env[62619]: _type = "Task" [ 933.055231] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.070979] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365152, 'name': CreateVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.085442] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6205e22-8c9b-46ea-9d6a-13320e011e65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.092440] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b5532f49-a332-44d3-8044-58d8629af2c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.123935] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-568a3bab-3c94-48ad-9489-13ae785c24f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.131460] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fa53d90-3b12-4c43-ad05-a1f4e61deb49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.145395] env[62619]: DEBUG nova.compute.provider_tree [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.148611] env[62619]: DEBUG nova.network.neutron [-] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.163158] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock 
"refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.163669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 933.163829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 933.164820] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dea1dc57-80f6-4010-b150-5babc57c8a85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.181854] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=,min_ram=,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 933.182276] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 933.182382] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 933.182477] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 933.182627] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 933.182774] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, 
threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 933.182985] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 933.183197] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 933.183311] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 933.183475] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 933.183647] env[62619]: DEBUG nova.virt.hardware [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 933.189995] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Reconfiguring VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1923}} [ 933.192931] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ca554fc8-5a93-479d-9054-4ec679b2f388 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.204978] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26113019-5cc5-4b4c-87d1-c32f9c48155d tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 18.038s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 933.212039] env[62619]: DEBUG oslo_vmware.api [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 933.212039] env[62619]: value = "task-1365153" [ 933.212039] env[62619]: _type = "Task" [ 933.212039] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.223080] env[62619]: DEBUG oslo_vmware.api [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365153, 'name': ReconfigVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.317782] env[62619]: DEBUG nova.network.neutron [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Updated VIF entry in instance network info cache for port b144833e-a8d9-4967-bb72-78e71720d55e. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 933.318165] env[62619]: DEBUG nova.network.neutron [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Updating instance_info_cache with network_info: [{"id": "b144833e-a8d9-4967-bb72-78e71720d55e", "address": "fa:16:3e:20:f0:33", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb144833e-a8", "ovs_interfaceid": "b144833e-a8d9-4967-bb72-78e71720d55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 933.421146] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365151, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.565363] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365152, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.654494] env[62619]: INFO nova.compute.manager [-] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Took 1.49 seconds to deallocate network for instance. 
[ 933.675943] env[62619]: ERROR nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [req-4b1dc7d4-85ad-4a60-b4cb-952e21b089aa] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-4b1dc7d4-85ad-4a60-b4cb-952e21b089aa"}]} [ 933.693449] env[62619]: DEBUG nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 933.712960] env[62619]: DEBUG nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 933.713249] env[62619]: DEBUG nova.compute.provider_tree [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 933.724477] env[62619]: DEBUG oslo_vmware.api [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365153, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.726014] env[62619]: DEBUG nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 933.746322] env[62619]: DEBUG nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 933.821701] env[62619]: DEBUG oslo_concurrency.lockutils [req-348511c7-f1e3-44d8-b4a0-b41837b5dd90 req-699c03a1-0710-4cda-8281-5b8c165459db service nova] Releasing lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 933.922231] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365151, 'name': ReconfigVM_Task, 'duration_secs': 0.810313} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 933.923114] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/02dec4f2-cbe7-4bb0-a57e-3970c5669354.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 933.923223] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-95ae88d8-4073-4e5a-9fa3-6e53adbe3de2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.931433] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 933.931433] env[62619]: value = "task-1365154" [ 933.931433] env[62619]: _type = "Task" [ 933.931433] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 933.940148] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba88b442-4539-42ef-82d3-6856bc9a1249 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.945712] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365154, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 933.950496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c3f39d8-71b9-4677-8b66-d59fce0f0258 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.981798] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25fbd168-24ae-4e43-a153-c7b3c47496ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 933.990013] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c01c464e-001f-4c9e-81e4-6ad812beab24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.011179] env[62619]: DEBUG nova.compute.provider_tree [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.068357] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365152, 'name': CreateVM_Task, 'duration_secs': 0.664361} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.068575] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 934.069466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.069660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.070016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 934.070408] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d66784bc-bf8a-4958-8ea5-506dc97d9ee8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.075792] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 934.075792] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c41741-08b8-f173-c8cc-795bbc822fbc" [ 934.075792] env[62619]: _type = "Task" [ 934.075792] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.084684] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c41741-08b8-f173-c8cc-795bbc822fbc, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.162018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.224817] env[62619]: DEBUG oslo_vmware.api [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365153, 'name': ReconfigVM_Task, 'duration_secs': 0.970449} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.225359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.225583] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Reconfigured VM to attach interface {{(pid=62619) attach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1937}} [ 934.354197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.355115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.355115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 934.355115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 934.355273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.357748] env[62619]: INFO nova.compute.manager [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Terminating instance [ 934.359602] env[62619]: DEBUG nova.compute.manager [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 
c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 934.359856] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 934.361117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dc21d88-102d-4712-8224-49dd244c6299 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.369358] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 934.369652] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9ecfebfb-8994-4985-a027-fdfa1cb065a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.377749] env[62619]: DEBUG oslo_vmware.api [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 934.377749] env[62619]: value = "task-1365155" [ 934.377749] env[62619]: _type = "Task" [ 934.377749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.385677] env[62619]: DEBUG oslo_vmware.api [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365155, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.442133] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365154, 'name': Rename_Task, 'duration_secs': 0.242518} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.442510] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 934.442876] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b1282183-c3b8-493b-88ba-691279babca9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.450083] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 934.450083] env[62619]: value = "task-1365156" [ 934.450083] env[62619]: _type = "Task" [ 934.450083] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.459332] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365156, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.560998] env[62619]: DEBUG nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 104 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 934.561342] env[62619]: DEBUG nova.compute.provider_tree [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 104 to 105 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 934.561342] env[62619]: DEBUG nova.compute.provider_tree [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 934.588074] env[62619]: 
DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c41741-08b8-f173-c8cc-795bbc822fbc, 'name': SearchDatastore_Task, 'duration_secs': 0.011772} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.588398] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 934.588639] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 934.588875] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 934.589035] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 934.589222] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 934.589495] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3e884821-ef03-4704-a2e2-4e914c1bea25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.598121] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 934.598315] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 934.599074] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-43942f64-aa9c-4126-bff8-af848216b7ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.605463] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 934.605463] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523676eb-b55f-645d-ea20-446636874bb3" [ 934.605463] env[62619]: _type = "Task" [ 934.605463] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.613230] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523676eb-b55f-645d-ea20-446636874bb3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.631638] env[62619]: DEBUG nova.compute.manager [req-33b71b85-c675-4aac-a8b6-4614e80b3837 req-6eeded08-a2d3-484b-a649-37cf2a683831 service nova] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Received event network-vif-deleted-5f723445-4135-4034-b144-6e0a4b2f67fb {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 934.730646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-13e81837-3c23-4cb8-a76f-4d80ac6725ba tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-8c07697f-0e20-4ec5-88ec-ec4420906313-9413dc32-2f0c-4650-952a-63bed028a099" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 7.738s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 934.890628] env[62619]: DEBUG oslo_vmware.api [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365155, 'name': PowerOffVM_Task, 'duration_secs': 0.227553} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 934.890999] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 934.891248] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 934.891555] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-deee07d2-14ce-41d6-82a6-10e114ab14d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.960684] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365156, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 934.967044] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 934.967564] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 934.967823] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore2] c4d0e4fc-9ce2-4ebc-8ede-337f843ec855 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 934.968149] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-302045ed-1396-41da-823d-701101061f6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.975017] env[62619]: DEBUG oslo_vmware.api [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 934.975017] env[62619]: value = "task-1365158" [ 934.975017] env[62619]: _type = "Task" [ 934.975017] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 934.982798] env[62619]: DEBUG oslo_vmware.api [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365158, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.067474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.741s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.070255] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 8.846s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 935.072096] env[62619]: INFO nova.compute.claims [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 935.090044] env[62619]: INFO nova.scheduler.client.report [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocations for instance 42aeba4e-5c87-46d5-9c7c-c6f263c69171 [ 935.117156] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523676eb-b55f-645d-ea20-446636874bb3, 'name': SearchDatastore_Task, 'duration_secs': 0.009046} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.117971] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03d8ea4d-4fe1-40e2-934a-ebaf07390618 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.123833] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 935.123833] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52412e3a-ed28-5070-1c35-f175a83fbf72" [ 935.123833] env[62619]: _type = "Task" [ 935.123833] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.132612] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52412e3a-ed28-5070-1c35-f175a83fbf72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.460788] env[62619]: DEBUG oslo_vmware.api [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365156, 'name': PowerOnVM_Task, 'duration_secs': 0.720424} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.461151] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 935.461285] env[62619]: INFO nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Took 9.04 seconds to spawn the instance on the hypervisor. [ 935.461468] env[62619]: DEBUG nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 935.462317] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4bc20469-2a2d-449d-a9fb-4199e37e4de2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.484067] env[62619]: DEBUG oslo_vmware.api [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365158, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.452911} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.485441] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 935.485441] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 935.485441] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 935.485441] env[62619]: INFO nova.compute.manager [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Took 1.13 seconds to destroy the instance on the hypervisor. 
[ 935.485441] env[62619]: DEBUG oslo.service.loopingcall [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 935.485441] env[62619]: DEBUG nova.compute.manager [-] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 935.485441] env[62619]: DEBUG nova.network.neutron [-] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 935.598492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b89bacf1-c704-48b6-9f59-a33791bd3164 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "42aeba4e-5c87-46d5-9c7c-c6f263c69171" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 15.932s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 935.634322] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52412e3a-ed28-5070-1c35-f175a83fbf72, 'name': SearchDatastore_Task, 'duration_secs': 0.012844} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 935.634989] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 935.634989] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 935.635182] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9397c2e2-eaf7-432e-8869-b10c4e5efb88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.642183] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 935.642183] env[62619]: value = "task-1365159" [ 935.642183] env[62619]: _type = "Task" [ 935.642183] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 935.649762] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365159, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 935.982801] env[62619]: INFO nova.compute.manager [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Took 18.03 seconds to build instance. [ 936.155229] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365159, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.237518] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c7a346c-b9e6-4571-b252-3117d14723d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.245547] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97df6d49-f1ae-45b2-bbd6-931b9b987315 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.249087] env[62619]: DEBUG nova.network.neutron [-] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 936.275870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "interface-8c07697f-0e20-4ec5-88ec-ec4420906313-9413dc32-2f0c-4650-952a-63bed028a099" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.276153] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-8c07697f-0e20-4ec5-88ec-ec4420906313-9413dc32-2f0c-4650-952a-63bed028a099" acquired by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.278938] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc4b16b8-4e1c-4033-9d12-1a65b3d5804c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.288027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40117301-77fd-4214-a25d-ff85aedd3f27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.301543] env[62619]: DEBUG 
nova.compute.provider_tree [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 936.486553] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e81c1ec1-abe5-4eb7-805d-b504bca0e712 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.550s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.643799] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "4bc613db-af56-48b4-8c24-7f44428d8b4f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 936.644067] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.656864] env[62619]: DEBUG nova.compute.manager [req-c32f8964-d99e-4dee-b7aa-367af37eb459 req-219f204d-30ed-4f10-9994-9f30e348945f service nova] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Received event network-vif-deleted-947d01f5-2fdd-4496-9e40-26ff1765031b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 936.660892] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365159, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.531823} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 936.661152] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 936.661370] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 936.661660] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-2062186a-882a-4e07-86db-41f7884f0b49 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.669148] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 936.669148] env[62619]: value = "task-1365160" [ 936.669148] env[62619]: _type = "Task" [ 936.669148] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.677824] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365160, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 936.751148] env[62619]: INFO nova.compute.manager [-] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Took 1.27 seconds to deallocate network for instance. 
[ 936.779198] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 936.779448] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 936.780732] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b1f4220a-bac1-4434-b784-9dce11566d4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.799142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b277aafa-3424-431e-8927-e9b110ab5dee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.805076] env[62619]: DEBUG nova.scheduler.client.report [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 936.829251] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Reconfiguring VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1968}} [ 936.830277] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 1.760s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 936.830767] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 936.833538] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c969f2cf-540e-4e9f-a3b7-80519d711546 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 936.846723] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 9.570s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 936.854862] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 936.854862] env[62619]: value = "task-1365161" [ 936.854862] env[62619]: _type = "Task" [ 936.854862] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 936.863887] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.146736] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 937.179114] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365160, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.330615} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 937.179447] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 937.180258] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-055dc193-5b14-4658-a5f2-6e7d59149412 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.205108] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 937.205615] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f974c6a8-4f2f-4e58-acfb-7dd66576f995 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.224358] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 937.224358] env[62619]: value = "task-1365162" [ 937.224358] env[62619]: _type = "Task" [ 937.224358] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 937.232030] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365162, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.257668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.350414] env[62619]: DEBUG nova.compute.utils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 937.355171] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 937.355314] env[62619]: DEBUG nova.network.neutron [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 937.366220] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.409009] env[62619]: DEBUG nova.policy [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afafdd2ffb254ed88413f677c6480b27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '382289eb2cfc4cb18bda69f887770db8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 937.543842] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-183f9328-201a-4752-9556-84c6f2143c33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.552607] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cfb98dd7-e937-4e0f-b339-48d7986924d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.582741] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fff42db5-a249-4dd7-8f40-770b7a92a688 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.590466] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abe18bd3-bf7c-45ec-adb8-de4ce6ca5ded {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 937.605265] env[62619]: DEBUG nova.compute.provider_tree [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 937.667093] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 937.695967] env[62619]: DEBUG nova.network.neutron [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b 
tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Successfully created port: ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 937.735717] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365162, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 937.855447] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 937.868318] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.111724] env[62619]: DEBUG nova.scheduler.client.report [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 938.235180] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365162, 'name': ReconfigVM_Task, 'duration_secs': 0.547569} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.235461] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 938.236118] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-996f39d4-ac25-484b-8ff3-13addd82cc6a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.242599] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 938.242599] env[62619]: value = "task-1365163" [ 938.242599] env[62619]: _type = "Task" [ 938.242599] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.250657] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365163, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.372410] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.753951] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365163, 'name': Rename_Task, 'duration_secs': 0.132431} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 938.753951] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 938.753951] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-f2acb8ce-c37c-4198-966f-ddabceba1232 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.759581] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 938.759581] env[62619]: value = "task-1365164" [ 938.759581] env[62619]: _type = "Task" [ 938.759581] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 938.767325] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365164, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.868609] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 938.878089] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 938.896238] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 938.896513] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 938.896678] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 938.896867] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 938.897032] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 938.897193] env[62619]: DEBUG nova.virt.hardware 
[None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 938.897406] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 938.897568] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 938.897736] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 938.897904] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 938.898109] env[62619]: DEBUG nova.virt.hardware [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 938.899293] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42e26525-5388-48c6-8765-ee85f6147711 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 938.907048] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-feaa7230-b4cd-43d4-8bba-00ade850b341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.124574] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.278s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.129339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.966s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.129339] env[62619]: DEBUG nova.objects.instance [None 
req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lazy-loading 'resources' on Instance uuid ef1e80cf-2ea2-4764-851a-8aa97563a278 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 939.136755] env[62619]: DEBUG nova.compute.manager [req-d4ead2e0-f0ee-49b2-9463-670f88ba5db8 req-0fc8854f-adda-402f-a9f3-f62eaf85b135 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Received event network-vif-plugged-ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 939.136980] env[62619]: DEBUG oslo_concurrency.lockutils [req-d4ead2e0-f0ee-49b2-9463-670f88ba5db8 req-0fc8854f-adda-402f-a9f3-f62eaf85b135 service nova] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 939.137209] env[62619]: DEBUG oslo_concurrency.lockutils [req-d4ead2e0-f0ee-49b2-9463-670f88ba5db8 req-0fc8854f-adda-402f-a9f3-f62eaf85b135 service nova] Lock "74aa214a-7eda-4613-a394-bc7477d3078e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 939.137371] env[62619]: DEBUG oslo_concurrency.lockutils [req-d4ead2e0-f0ee-49b2-9463-670f88ba5db8 req-0fc8854f-adda-402f-a9f3-f62eaf85b135 service nova] Lock "74aa214a-7eda-4613-a394-bc7477d3078e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 939.137881] env[62619]: DEBUG nova.compute.manager [req-d4ead2e0-f0ee-49b2-9463-670f88ba5db8 req-0fc8854f-adda-402f-a9f3-f62eaf85b135 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] No waiting events found dispatching network-vif-plugged-ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 939.137881] env[62619]: WARNING nova.compute.manager [req-d4ead2e0-f0ee-49b2-9463-670f88ba5db8 req-0fc8854f-adda-402f-a9f3-f62eaf85b135 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Received unexpected event network-vif-plugged-ec268e92-8fac-43c7-b39c-0518ca9a0403 for instance with vm_state building and task_state spawning. [ 939.239116] env[62619]: DEBUG nova.network.neutron [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Successfully updated port: ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 939.270052] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365164, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.375932] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.689719] env[62619]: INFO nova.scheduler.client.report [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocation for migration ec366ade-42a0-4f38-a1b8-bddeb5641557 [ 939.743224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 939.743224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 939.743224] env[62619]: DEBUG nova.network.neutron [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 939.772974] env[62619]: DEBUG oslo_vmware.api [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365164, 'name': PowerOnVM_Task, 'duration_secs': 0.997477} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 939.773295] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 939.773295] env[62619]: INFO nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Took 9.09 seconds to spawn the instance on the hypervisor. 
[ 939.773562] env[62619]: DEBUG nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 939.774288] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49f6524a-4bfe-4875-afed-c1ecd44ff039 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.826976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b43ad0c-a0db-4a57-9792-3c9ca3123b90 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.835411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f924ec06-9ba7-40bf-bc0f-2125d90000dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.871422] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf68d388-6456-43e4-b6f1-a7c8095dbbd5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.880155] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e17721f1-206e-448a-98ee-141b0287c6cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 939.885029] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 939.894874] env[62619]: DEBUG nova.compute.provider_tree [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 940.197990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bb02d420-c439-4434-8949-201775367aad tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 16.442s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.286869] env[62619]: DEBUG nova.network.neutron [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 940.298847] env[62619]: INFO nova.compute.manager [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Took 21.52 seconds to build instance. [ 940.379024] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.398141] env[62619]: DEBUG nova.scheduler.client.report [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 940.438840] env[62619]: DEBUG nova.network.neutron [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [{"id": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "address": "fa:16:3e:2b:fd:42", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec268e92-8f", "ovs_interfaceid": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 940.800836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-aff1dc27-9db6-42b4-92d5-1cbe61d3efdf tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.028s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
940.877467] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 940.903735] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.776s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 940.906131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.648s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 940.906562] env[62619]: DEBUG nova.objects.instance [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid c4d0e4fc-9ce2-4ebc-8ede-337f843ec855 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 940.925951] env[62619]: INFO nova.scheduler.client.report [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted allocations for instance ef1e80cf-2ea2-4764-851a-8aa97563a278 [ 940.946397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 940.946397] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Instance network_info: |[{"id": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "address": "fa:16:3e:2b:fd:42", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec268e92-8f", "ovs_interfaceid": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 940.946856] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2b:fd:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ec268e92-8fac-43c7-b39c-0518ca9a0403', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 940.955513] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Creating folder: Project (382289eb2cfc4cb18bda69f887770db8). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 940.955513] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b4c144fb-d54c-47bf-9bd2-0ada9db0531f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.968627] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Created folder: Project (382289eb2cfc4cb18bda69f887770db8) in parent group-v290436. [ 940.968835] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Creating folder: Instances. Parent ref: group-v290523. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 940.969422] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-eb2a7118-c1ec-4d07-974b-883e76155c8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 940.978198] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Created folder: Instances in parent group-v290523. [ 940.978464] env[62619]: DEBUG oslo.service.loopingcall [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 940.978997] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 940.979197] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5982d782-8a01-45e4-a3fc-f0b96078341c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.001296] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 941.001296] env[62619]: value = "task-1365167" [ 941.001296] env[62619]: _type = "Task" [ 941.001296] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.008546] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365167, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.174708] env[62619]: DEBUG nova.compute.manager [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Received event network-changed-ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 941.175725] env[62619]: DEBUG nova.compute.manager [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Refreshing instance network info cache due to event network-changed-ec268e92-8fac-43c7-b39c-0518ca9a0403. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 941.176016] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] Acquiring lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.176192] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] Acquired lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.176385] env[62619]: DEBUG nova.network.neutron [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Refreshing network info cache for port ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 941.219442] env[62619]: INFO nova.compute.manager [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Rescuing [ 941.219728] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.219887] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.220078] env[62619]: DEBUG nova.network.neutron [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 941.376629] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.433339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ea21c5db-8f03-4a5f-b1fa-721f1c280640 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "ef1e80cf-2ea2-4764-851a-8aa97563a278" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 10.426s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.509814] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365167, 'name': CreateVM_Task, 'duration_secs': 0.308784} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 941.511990] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 941.513151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 941.513378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 941.514076] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 941.514153] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4f55e34d-1969-474b-8710-a0617542f100 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.519073] env[62619]: 
DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 941.519073] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5233f577-0f23-943e-8f4d-9d818ecfc60e" [ 941.519073] env[62619]: _type = "Task" [ 941.519073] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.528796] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5233f577-0f23-943e-8f4d-9d818ecfc60e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.551878] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa8a6cc-5286-4bfa-bc4d-e120c6e13f96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.558536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bd7c74a-4b39-4264-b626-d26649fe4f93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.589734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.589991] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.590278] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 941.590480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 941.590656] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock 
"11542a9b-6556-4b4b-88fe-26c6be2969f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 941.593327] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e663139-4ecf-474d-8b0e-d43d93153106 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.596110] env[62619]: INFO nova.compute.manager [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Terminating instance [ 941.598139] env[62619]: DEBUG nova.compute.manager [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 941.598340] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 941.599035] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f266ba9b-5769-488e-90ec-91c30a480d81 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.604752] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9968fe5-3755-44aa-9dfe-5380e0989073 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.609951] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 941.610553] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-792ebb47-9339-40fe-a85c-7d90daa29cae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 941.619622] env[62619]: DEBUG nova.compute.provider_tree [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 941.625519] env[62619]: DEBUG oslo_vmware.api [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 941.625519] env[62619]: value = "task-1365168" [ 941.625519] env[62619]: _type = "Task" [ 941.625519] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 941.633358] env[62619]: DEBUG oslo_vmware.api [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365168, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 941.883146] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.012196] env[62619]: DEBUG nova.network.neutron [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updated VIF entry in instance network info cache for port ec268e92-8fac-43c7-b39c-0518ca9a0403. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 942.012512] env[62619]: DEBUG nova.network.neutron [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [{"id": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "address": "fa:16:3e:2b:fd:42", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec268e92-8f", "ovs_interfaceid": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.035721] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5233f577-0f23-943e-8f4d-9d818ecfc60e, 'name': SearchDatastore_Task, 'duration_secs': 0.012011} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.036208] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.036460] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 942.036954] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 942.037186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 942.037448] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 942.037648] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-137500c8-9e4a-40c6-be0a-e979f5cc0fc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.047336] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 942.047514] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 942.048237] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d1173d60-b8d9-47f7-b714-5af5f9b40be0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.053563] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 942.053563] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521d3896-5c06-c869-96fb-0d09a4595a24" [ 942.053563] env[62619]: _type = "Task" [ 942.053563] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.064049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.064295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.065360] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521d3896-5c06-c869-96fb-0d09a4595a24, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.066416] env[62619]: DEBUG nova.network.neutron [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Updating instance_info_cache with network_info: [{"id": "b144833e-a8d9-4967-bb72-78e71720d55e", "address": "fa:16:3e:20:f0:33", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb144833e-a8", "ovs_interfaceid": "b144833e-a8d9-4967-bb72-78e71720d55e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 942.123250] env[62619]: DEBUG nova.scheduler.client.report [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 942.136849] env[62619]: DEBUG oslo_vmware.api [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365168, 'name': PowerOffVM_Task, 'duration_secs': 0.187128} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.137127] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 942.137304] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 942.137554] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca3abf03-2053-436f-9726-23c9935164b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.200519] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 942.200519] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 942.200789] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleting the datastore file [datastore1] 11542a9b-6556-4b4b-88fe-26c6be2969f6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 942.200971] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-27c7e56c-209a-428b-bee0-7eaaf16e2045 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.208237] env[62619]: DEBUG oslo_vmware.api [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 942.208237] env[62619]: value = "task-1365170" [ 942.208237] env[62619]: _type = "Task" [ 942.208237] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.216963] env[62619]: DEBUG oslo_vmware.api [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365170, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.366904] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.367240] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" acquired by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.367439] env[62619]: INFO nova.compute.manager [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Shelving [ 942.377969] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task} progress is 18%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.516094] env[62619]: DEBUG oslo_concurrency.lockutils [req-b3fa7ea6-de6c-4d7b-94a6-0e57551aa1ad req-f58a5334-f2b5-43e5-8b10-4bbbd245f986 service nova] Releasing lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.564836] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521d3896-5c06-c869-96fb-0d09a4595a24, 'name': SearchDatastore_Task, 'duration_secs': 0.008212} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.565663] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cdc48a08-4d94-4768-a336-25d7a3b660bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.568265] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 942.571147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.577125] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 942.577125] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a5027a-ed89-76ad-50a4-7f4746541103" [ 942.577125] env[62619]: _type = "Task" [ 942.577125] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.586868] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a5027a-ed89-76ad-50a4-7f4746541103, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.632951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.727s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.635609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 4.969s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.637412] env[62619]: INFO nova.compute.claims [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 942.656651] env[62619]: INFO nova.scheduler.client.report [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance c4d0e4fc-9ce2-4ebc-8ede-337f843ec855 [ 942.677031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "1c1b7717-30a9-40c9-913f-6d65a619b94a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.677168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.677501] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "1c1b7717-30a9-40c9-913f-6d65a619b94a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 942.677583] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 942.677784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 942.680290] env[62619]: INFO nova.compute.manager [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Terminating instance [ 942.682333] env[62619]: DEBUG nova.compute.manager [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 942.682536] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 942.683393] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d27ef20e-317e-494a-8f2c-41af90defe66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.691585] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 942.692143] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-73e1eb33-574c-4f11-87ce-9d90fc8366cf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.698197] env[62619]: DEBUG oslo_vmware.api [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 942.698197] env[62619]: value = "task-1365171" [ 942.698197] env[62619]: _type = "Task" [ 942.698197] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.707784] env[62619]: DEBUG oslo_vmware.api [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365171, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 942.718047] env[62619]: DEBUG oslo_vmware.api [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365170, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.158699} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.718325] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 942.718520] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 942.718701] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 942.718877] env[62619]: INFO nova.compute.manager [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Took 1.12 seconds to destroy the instance on the hypervisor. [ 942.719154] env[62619]: DEBUG oslo.service.loopingcall [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 942.719356] env[62619]: DEBUG nova.compute.manager [-] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 942.719453] env[62619]: DEBUG nova.network.neutron [-] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 942.882152] env[62619]: DEBUG oslo_vmware.api [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365161, 'name': ReconfigVM_Task, 'duration_secs': 5.821197} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 942.882758] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 942.882951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 942.883176] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Reconfigured VM to detach interface {{(pid=62619) detach_interface /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1978}} [ 942.885307] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-b1f1add2-ce20-4c44-acfb-c5d349436341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 942.892185] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 942.892185] env[62619]: value = "task-1365172" [ 942.892185] env[62619]: _type = "Task" [ 942.892185] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 942.902342] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365172, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.088313] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a5027a-ed89-76ad-50a4-7f4746541103, 'name': SearchDatastore_Task, 'duration_secs': 0.012766} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.088610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 943.089046] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 74aa214a-7eda-4613-a394-bc7477d3078e/74aa214a-7eda-4613-a394-bc7477d3078e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 943.091132] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.091338] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71aa4402-275a-40b7-89e2-7d1c415853d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.098821] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 943.098821] env[62619]: value = "task-1365173" [ 943.098821] env[62619]: _type = "Task" [ 943.098821] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.104877] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.105650] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c52569ea-891f-4a87-a3c3-6601879c4698 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.110470] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365173, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.115052] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 943.115052] env[62619]: value = "task-1365174" [ 943.115052] env[62619]: _type = "Task" [ 943.115052] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.123714] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365174, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.164330] env[62619]: DEBUG oslo_concurrency.lockutils [None req-16f9d150-9e41-40e7-ba14-94b8fb62222f tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "c4d0e4fc-9ce2-4ebc-8ede-337f843ec855" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.810s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.185267] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.185898] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.208058] env[62619]: DEBUG nova.compute.manager [req-930a503c-3aab-4bff-a1a4-6b3ea5ba5404 req-bdf3fa38-aef4-4cc3-8417-aaeea3d80c8e service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Received event network-vif-deleted-a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 943.208401] env[62619]: INFO nova.compute.manager [req-930a503c-3aab-4bff-a1a4-6b3ea5ba5404 req-bdf3fa38-aef4-4cc3-8417-aaeea3d80c8e service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Neutron deleted interface a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff; detaching it from the instance and deleting it from the info cache [ 943.208458] env[62619]: DEBUG nova.network.neutron [req-930a503c-3aab-4bff-a1a4-6b3ea5ba5404 req-bdf3fa38-aef4-4cc3-8417-aaeea3d80c8e service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.219860] env[62619]: DEBUG oslo_vmware.api [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365171, 'name': 
PowerOffVM_Task, 'duration_secs': 0.33064} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.220211] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.220487] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 943.220832] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1a7d7303-cf56-407b-a892-b7fd2ce4ef00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.404250] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365172, 'name': PowerOffVM_Task, 'duration_secs': 0.189942} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.404609] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.405548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fbd6333-cd8f-464b-809a-1e7051ecfac1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.430636] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093de2ec-ccdc-48fe-ae2e-1fab0771d911 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.563976] env[62619]: DEBUG nova.network.neutron [-] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 943.612054] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365173, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.627153] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365174, 'name': PowerOffVM_Task, 'duration_secs': 0.20309} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 943.627382] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 943.628284] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85e5735a-d024-4758-b380-6c9344807e78 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.649060] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-927df4a2-954b-4e04-b6c4-21863a1f444a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.688051] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.688458] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-61966a72-4a84-4821-9fc0-69c4230103ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.693378] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 943.704163] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 943.704163] env[62619]: value = "task-1365176" [ 943.704163] env[62619]: _type = "Task" [ 943.704163] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.716861] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b8a32e7c-8ca0-46d9-b4ee-8c6967fca0d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.718806] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 943.719155] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 943.719283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 943.719428] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 943.719603] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 943.720185] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a81287bf-7aaa-4387-b51a-290970e1f0a0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.728971] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6eeb840f-26e1-415b-830c-f68b7943a732 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.743836] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 943.744078] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Folder [datastore2] 
devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 943.745811] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-36fb1dd3-5272-40fd-889c-258dbb06f64e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.751346] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 943.751346] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b36b0-c6bf-1bf7-b817-02864070e0e1" [ 943.751346] env[62619]: _type = "Task" [ 943.751346] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.765319] env[62619]: DEBUG nova.compute.manager [req-930a503c-3aab-4bff-a1a4-6b3ea5ba5404 req-bdf3fa38-aef4-4cc3-8417-aaeea3d80c8e service nova] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Detach interface failed, port_id=a0389ef1-cfdc-4714-aa2a-9e1f8abc8cff, reason: Instance 11542a9b-6556-4b4b-88fe-26c6be2969f6 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 943.772066] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b36b0-c6bf-1bf7-b817-02864070e0e1, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.849790] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb86dd61-47f8-45a1-a468-1286717c1f40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.857348] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dba4632-1772-4ddc-81f5-aaf0b7cf6903 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.886346] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7934ae01-9abe-447c-9e49-02c5eeb2dc50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.892859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.893099] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.893319] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 943.893510] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 943.894230] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 943.896557] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1889fa2-aafc-4b5d-affb-90200560646c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.900654] env[62619]: INFO nova.compute.manager [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Terminating instance [ 943.902752] env[62619]: DEBUG nova.compute.manager [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 943.902930] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 943.903702] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6e9fa6b-5a4a-478b-8893-4b2b0dab1740 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.915332] env[62619]: DEBUG nova.compute.provider_tree [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 943.920907] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 943.920907] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-71866726-0678-449f-a417-2625ea0ad1fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.927656] env[62619]: DEBUG oslo_vmware.api [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 943.927656] env[62619]: value = "task-1365177" [ 943.927656] env[62619]: _type = "Task" [ 943.927656] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.936346] env[62619]: DEBUG oslo_vmware.api [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365177, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 943.942735] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 943.943044] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-4fbebffd-822b-48f4-afa4-a791cbb3b1d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 943.949762] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 943.949762] env[62619]: value = "task-1365178" [ 943.949762] env[62619]: _type = "Task" [ 943.949762] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 943.958058] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365178, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.067364] env[62619]: INFO nova.compute.manager [-] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Took 1.35 seconds to deallocate network for instance. [ 944.111524] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365173, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.644105} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.111757] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 74aa214a-7eda-4613-a394-bc7477d3078e/74aa214a-7eda-4613-a394-bc7477d3078e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 944.112022] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 944.112274] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-68768f27-e073-4525-8d7b-d7e1031f0885 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.120553] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 944.120553] env[62619]: value = "task-1365179" [ 944.120553] env[62619]: _type = "Task" [ 944.120553] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.127812] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365179, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.213551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.217116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.217544] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.217544] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleting the datastore file [datastore1] 1c1b7717-30a9-40c9-913f-6d65a619b94a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.217771] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37fad750-2d91-40bd-8f8d-a5cd1eb169ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.224350] env[62619]: DEBUG oslo_vmware.api [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 944.224350] env[62619]: value = "task-1365180" [ 944.224350] env[62619]: _type = "Task" [ 944.224350] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.235678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.235932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.237172] env[62619]: DEBUG oslo_vmware.api [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365180, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.261386] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b36b0-c6bf-1bf7-b817-02864070e0e1, 'name': SearchDatastore_Task, 'duration_secs': 0.018112} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.262146] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc0b34f9-702b-4db3-8468-297c323a947e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.267270] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 944.267270] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521d0e07-efe4-4103-0d7e-95f6c3222fa3" [ 944.267270] env[62619]: _type = "Task" [ 944.267270] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.274751] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 944.274920] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquired lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 944.275111] env[62619]: DEBUG nova.network.neutron [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 944.276282] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521d0e07-efe4-4103-0d7e-95f6c3222fa3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.418068] env[62619]: DEBUG nova.scheduler.client.report [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 944.437844] env[62619]: DEBUG oslo_vmware.api [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365177, 'name': PowerOffVM_Task, 'duration_secs': 0.225996} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.438135] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 944.438313] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 944.438570] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-55a6c573-9add-4e82-9800-d1b368667147 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.457552] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365178, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.507293] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 944.507605] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 944.507795] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleting the datastore file [datastore2] 8c07697f-0e20-4ec5-88ec-ec4420906313 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 944.508080] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-bd6313dd-c03d-4744-b6fb-5a35ab4edb02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.514258] env[62619]: DEBUG oslo_vmware.api [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 944.514258] env[62619]: value = "task-1365182" [ 944.514258] env[62619]: _type = "Task" [ 944.514258] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.521679] env[62619]: DEBUG oslo_vmware.api [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365182, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.574546] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 944.629605] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365179, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.629965] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 944.630796] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adfbe316-a878-4ee0-b69d-76390420a041 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.653344] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Reconfiguring VM instance instance-00000059 to attach disk [datastore2] 74aa214a-7eda-4613-a394-bc7477d3078e/74aa214a-7eda-4613-a394-bc7477d3078e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 944.653652] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-d3b2b1c5-de6e-4cee-a898-826132268eed {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.672210] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 944.672210] env[62619]: value = "task-1365183" [ 944.672210] env[62619]: _type = "Task" [ 944.672210] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.682086] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365183, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.734699] env[62619]: DEBUG oslo_vmware.api [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365180, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15303} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.735010] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 944.735221] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 944.735405] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 944.735585] env[62619]: INFO nova.compute.manager [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Took 2.05 seconds to destroy the instance on the hypervisor. [ 944.735825] env[62619]: DEBUG oslo.service.loopingcall [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 944.736028] env[62619]: DEBUG nova.compute.manager [-] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 944.736126] env[62619]: DEBUG nova.network.neutron [-] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 944.737974] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 944.779401] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]521d0e07-efe4-4103-0d7e-95f6c3222fa3, 'name': SearchDatastore_Task, 'duration_secs': 0.008975} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.780376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 944.780666] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. {{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 944.781257] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3b034321-16ab-41f2-8ff5-7f61b1932a27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 944.788236] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 944.788236] env[62619]: value = "task-1365184" [ 944.788236] env[62619]: _type = "Task" [ 944.788236] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 944.796487] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365184, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 944.923016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 944.923673] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 944.926418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.835s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 944.930108] env[62619]: INFO nova.compute.claims [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 944.960023] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365178, 'name': CreateSnapshot_Task, 'duration_secs': 0.861902} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 944.960388] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 944.961191] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53156c22-285b-47ed-9a04-87381d44b942 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.024613] env[62619]: DEBUG oslo_vmware.api [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365182, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14666} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.024849] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 945.025059] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 945.025252] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 945.025449] env[62619]: INFO nova.compute.manager [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Took 1.12 seconds to destroy the instance on the hypervisor. [ 945.025705] env[62619]: DEBUG oslo.service.loopingcall [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 945.028511] env[62619]: DEBUG nova.compute.manager [-] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 945.028638] env[62619]: DEBUG nova.network.neutron [-] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 945.139684] env[62619]: INFO nova.network.neutron [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Port 9413dc32-2f0c-4650-952a-63bed028a099 from network info_cache is no longer associated with instance in Neutron. Removing from network info_cache. 
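Annotation (not part of the trace): the task-based calls above (PowerOffVM_Task, DeleteDatastoreFile_Task, CreateSnapshot_Task, ExtendVirtualDisk_Task) all follow the same oslo.vmware pattern — invoke the VIM method, receive a Task managed object, then poll it until it finishes, which is what the "Waiting for the task ... / progress is N% / completed successfully" entries record. A minimal sketch of that pattern, assuming a placeholder vCenter endpoint, credentials, and VM managed-object reference:

from oslo_vmware import api, vim_util

# Placeholder endpoint and credentials; the VM moref lookup (normally done via
# PropertyCollector/SearchIndex, as in the RetrievePropertiesEx calls above)
# is also stubbed out.
session = api.VMwareAPISession('vcenter.example.test', 'user', 'secret',
                               api_retry_count=10, task_poll_interval=0.5)
vm_ref = vim_util.get_moref('vm-12345', 'VirtualMachine')

# A *_Task VIM method returns a Task moref immediately; wait_for_task() polls
# its state/progress (the "_poll_task ... progress is N%" lines) and raises if
# the task ends in an error state.
task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
session.wait_for_task(task)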
[ 945.140354] env[62619]: DEBUG nova.network.neutron [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [{"id": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "address": "fa:16:3e:7b:59:0e", "network": {"id": "f7c32aa4-863f-481c-899f-debfaa1844da", "bridge": "br-int", "label": "tempest-AttachInterfacesTestJSON-581309961-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.133", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4270942193cd4a9aa397784368b9ae64", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ba866c99-1cb2-4588-9f76-4bc0421ed46a", "external-id": "nsx-vlan-transportzone-92", "segmentation_id": 92, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapd70279ce-58", "ovs_interfaceid": "d70279ce-58c0-45c4-9a74-8f1f74552d21", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.182971] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365183, 'name': ReconfigVM_Task, 'duration_secs': 0.263324} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.183233] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Reconfigured VM instance instance-00000059 to attach disk [datastore2] 74aa214a-7eda-4613-a394-bc7477d3078e/74aa214a-7eda-4613-a394-bc7477d3078e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.183878] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-8d9ea9c1-03bd-49dd-bbd7-564a3419453f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.191162] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 945.191162] env[62619]: value = "task-1365185" [ 945.191162] env[62619]: _type = "Task" [ 945.191162] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.200036] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365185, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.235821] env[62619]: DEBUG nova.compute.manager [req-a964035d-be99-428f-a700-956e41c38a08 req-0a757bb0-3b12-43b1-87ac-a1c6af5e5a79 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Received event network-vif-deleted-65733022-a28a-4fb7-8d06-564479643fc0 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 945.236049] env[62619]: INFO nova.compute.manager [req-a964035d-be99-428f-a700-956e41c38a08 req-0a757bb0-3b12-43b1-87ac-a1c6af5e5a79 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Neutron deleted interface 65733022-a28a-4fb7-8d06-564479643fc0; detaching it from the instance and deleting it from the info cache [ 945.236217] env[62619]: DEBUG nova.network.neutron [req-a964035d-be99-428f-a700-956e41c38a08 req-0a757bb0-3b12-43b1-87ac-a1c6af5e5a79 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.264864] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 945.298611] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365184, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.463311} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.298838] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. 
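Annotation (not part of the trace): the SearchDatastore_Task hits on the image cache followed by the CopyVirtualDisk_Task above copy the cached image vmdk into the instance directory as a ...-rescue.vmdk, with the lock on the cached vmdk path released once the source is confirmed present. A hedged sketch of the copy call, reusing the session helper from the previous sketch; the datastore paths are placeholders patterned on the ones in the log:

# The virtual disk manager is exposed on the service content of the vim
# connection; CopyVirtualDisk_Task copies between datastore paths.
disk_mgr = session.vim.service_content.virtualDiskManager

copy_task = session.invoke_api(
    session.vim, 'CopyVirtualDisk_Task', disk_mgr,
    sourceName='[datastore2] devstack-image-cache_base/<image-id>/<image-id>.vmdk',
    destName='[datastore2] <instance-uuid>/<image-id>-rescue.vmdk')
session.wait_for_task(copy_task)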
[ 945.299635] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-42fc2113-879a-40fa-b97b-c1e4724bc7c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.325787] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Reconfiguring VM instance instance-00000058 to attach disk [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 945.326475] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b831f34-9eef-4e9a-b390-325a268186c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.344916] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 945.344916] env[62619]: value = "task-1365186" [ 945.344916] env[62619]: _type = "Task" [ 945.344916] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.354299] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365186, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.432172] env[62619]: DEBUG nova.compute.utils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 945.435857] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 945.436050] env[62619]: DEBUG nova.network.neutron [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 945.478721] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 945.479236] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-cc7fe2f0-2009-4d26-b511-e04284622668 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.484690] env[62619]: DEBUG nova.policy [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ce526a1d824fe8b6573fa80adcd53f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33242a5e0a764cf3b8af687fc4302e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 945.488396] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 945.488396] env[62619]: value = "task-1365187" [ 945.488396] env[62619]: _type = "Task" [ 945.488396] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.497094] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365187, 'name': CloneVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.642968] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Releasing lock "refresh_cache-8c07697f-0e20-4ec5-88ec-ec4420906313" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 945.651342] env[62619]: DEBUG nova.network.neutron [-] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 945.702403] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365185, 'name': Rename_Task, 'duration_secs': 0.133645} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.702403] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 945.703265] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-46a34097-6172-42a2-9f8d-5cb0849b653e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.709305] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 945.709305] env[62619]: value = "task-1365188" [ 945.709305] env[62619]: _type = "Task" [ 945.709305] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.717042] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365188, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.737619] env[62619]: DEBUG nova.network.neutron [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Successfully created port: eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 945.740051] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8fe39873-7d09-47ac-8562-d212e79d2184 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.749044] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bf0c870-d4e4-49b0-b279-c4595a85f684 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.777087] env[62619]: DEBUG nova.compute.manager [req-a964035d-be99-428f-a700-956e41c38a08 req-0a757bb0-3b12-43b1-87ac-a1c6af5e5a79 service nova] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Detach interface failed, port_id=65733022-a28a-4fb7-8d06-564479643fc0, reason: Instance 1c1b7717-30a9-40c9-913f-6d65a619b94a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 945.860488] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365186, 'name': ReconfigVM_Task, 'duration_secs': 0.464796} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 945.860791] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Reconfigured VM instance instance-00000058 to attach disk [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 945.865782] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e47bc303-aa58-47ed-935a-c940ac0f6aff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.893469] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fa8acabe-75c0-45d4-a1c1-caf6596ac22b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 945.910289] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 945.910289] env[62619]: value = "task-1365189" [ 945.910289] env[62619]: _type = "Task" [ 945.910289] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 945.919170] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365189, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 945.939029] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 946.001244] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365187, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.113437] env[62619]: DEBUG nova.compute.manager [req-5163fe47-7030-435c-a92c-fb895f5800c5 req-dfe7bb18-7f66-4156-b8cd-cd70df3e6182 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Received event network-vif-deleted-d70279ce-58c0-45c4-9a74-8f1f74552d21 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 946.113822] env[62619]: INFO nova.compute.manager [req-5163fe47-7030-435c-a92c-fb895f5800c5 req-dfe7bb18-7f66-4156-b8cd-cd70df3e6182 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Neutron deleted interface d70279ce-58c0-45c4-9a74-8f1f74552d21; detaching it from the instance and deleting it from the info cache [ 946.114101] env[62619]: DEBUG nova.network.neutron [req-5163fe47-7030-435c-a92c-fb895f5800c5 req-dfe7bb18-7f66-4156-b8cd-cd70df3e6182 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.146939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-626b514c-688f-4532-9bae-4176782a32b3 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "interface-8c07697f-0e20-4ec5-88ec-ec4420906313-9413dc32-2f0c-4650-952a-63bed028a099" "released" by "nova.compute.manager.ComputeManager.detach_interface..do_detach_interface" :: held 9.870s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 946.154336] env[62619]: INFO nova.compute.manager [-] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Took 1.42 seconds to deallocate network for instance. 
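Annotation (not part of the trace): the "Received event network-vif-deleted-..." entries arrive through Nova's os-server-external-events API — Neutron posts an event when a port bound to an instance is deleted, and nova-compute then attempts to detach the interface and prune it from the network info cache. A sketch of the request body such an event carries, using the port and instance IDs from the entry above; endpoint and auth handling are elided:

# Body Neutron POSTs to the compute API at /v2.1/os-server-external-events
# (an admin-only endpoint); the values below are the port/instance seen in
# the "network-vif-deleted-d70279ce..." entry above.
payload = {
    "events": [
        {"name": "network-vif-deleted",
         "server_uuid": "8c07697f-0e20-4ec5-88ec-ec4420906313",
         "tag": "d70279ce-58c0-45c4-9a74-8f1f74552d21"},
    ]
}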
[ 946.173728] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c1898d6-586e-450c-a731-9218424137ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.188367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2297f50-065b-4119-83bb-5b426f3afb87 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.225170] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-922dbc43-3e60-4a1d-8643-1f5507d8d728 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.236106] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-127def66-2e6a-4029-b1b3-f773b9f6c536 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.240045] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365188, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.252762] env[62619]: DEBUG nova.compute.provider_tree [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 946.420965] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365189, 'name': ReconfigVM_Task, 'duration_secs': 0.207933} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.421290] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 946.421749] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-919903de-c98b-48d0-8a62-8e0d28844293 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.427540] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 946.427540] env[62619]: value = "task-1365190" [ 946.427540] env[62619]: _type = "Task" [ 946.427540] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 946.435147] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365190, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.501832] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365187, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.589994] env[62619]: DEBUG nova.network.neutron [-] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 946.616456] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b6f431a4-2a59-4e15-aaaf-ada5890d4c70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.626022] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24b11f1e-6db0-45da-9139-bbbaad792b71 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.652175] env[62619]: DEBUG nova.compute.manager [req-5163fe47-7030-435c-a92c-fb895f5800c5 req-dfe7bb18-7f66-4156-b8cd-cd70df3e6182 service nova] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Detach interface failed, port_id=d70279ce-58c0-45c4-9a74-8f1f74552d21, reason: Instance 8c07697f-0e20-4ec5-88ec-ec4420906313 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 946.664533] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 946.730839] env[62619]: DEBUG oslo_vmware.api [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365188, 'name': PowerOnVM_Task, 'duration_secs': 0.529003} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 946.731222] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 946.731487] env[62619]: INFO nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Took 7.86 seconds to spawn the instance on the hypervisor. 
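Annotation (not part of the trace): the 7.86-second spawn of 74aa214a-... above is the usual vmwareapi build sequence — copy the cached image disk into the instance directory, extend it to the flavor's root size (1048576 KB here), reconfigure the VM to attach it, rename, then power on. A condensed sketch of the extend, rename, and power-on calls, reusing the session/moref helpers from the first sketch; the ReconfigVM_Task device spec is elided and the datacenter moref and VM name are placeholders:

dc_ref = vim_util.get_moref('datacenter-2', 'Datacenter')  # placeholder moref

# Grow the copied root vmdk to the flavor's root disk size (1048576 KB above).
extend_task = session.invoke_api(
    session.vim, 'ExtendVirtualDisk_Task',
    session.vim.service_content.virtualDiskManager,
    name='[datastore2] <instance-uuid>/<instance-uuid>.vmdk',
    datacenter=dc_ref, newCapacityKb=1048576, eagerZero=False)
session.wait_for_task(extend_task)

# ... ReconfigVM_Task attaches the vmdk via a VirtualDeviceConfigSpec (elided) ...

# Rename the VM (placeholder name) and power it on.
session.wait_for_task(
    session.invoke_api(session.vim, 'Rename_Task', vm_ref, newName='<vm-name>'))
session.wait_for_task(
    session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref))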
[ 946.731718] env[62619]: DEBUG nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 946.732610] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1ef907e-a330-4b4d-af3e-30703083d7f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.756253] env[62619]: DEBUG nova.scheduler.client.report [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 946.937643] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365190, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 946.950040] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 946.974837] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 946.975128] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 946.975299] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 946.975487] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 946.975636] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 946.975786] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 946.975995] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 946.976176] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 946.976347] env[62619]: DEBUG nova.virt.hardware [None 
req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 946.976510] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 946.976684] env[62619]: DEBUG nova.virt.hardware [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 946.977566] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1cde279-df1d-438e-887c-e5fc3367ddf4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 946.985291] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7976547a-dd04-479f-9a4c-65a37985d6b9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.008155] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365187, 'name': CloneVM_Task} progress is 95%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.094629] env[62619]: INFO nova.compute.manager [-] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Took 2.07 seconds to deallocate network for instance. [ 947.230465] env[62619]: DEBUG nova.network.neutron [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Successfully updated port: eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 947.251806] env[62619]: INFO nova.compute.manager [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Took 21.05 seconds to build instance. [ 947.261522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.335s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.261757] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 947.264864] env[62619]: DEBUG nova.compute.manager [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Received event network-vif-plugged-eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.265083] env[62619]: DEBUG oslo_concurrency.lockutils [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] Acquiring lock "4bc613db-af56-48b4-8c24-7f44428d8b4f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.265302] env[62619]: DEBUG oslo_concurrency.lockutils [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.265472] env[62619]: DEBUG oslo_concurrency.lockutils [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.265641] env[62619]: DEBUG nova.compute.manager [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] No waiting events found dispatching network-vif-plugged-eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 947.265811] env[62619]: WARNING nova.compute.manager [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Received unexpected event network-vif-plugged-eaa83ebe-4c4d-40e5-9883-5f85c6f71217 for instance with vm_state building and task_state spawning. [ 947.265971] env[62619]: DEBUG nova.compute.manager [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Received event network-changed-eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 947.266144] env[62619]: DEBUG nova.compute.manager [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Refreshing instance network info cache due to event network-changed-eaa83ebe-4c4d-40e5-9883-5f85c6f71217. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 947.266334] env[62619]: DEBUG oslo_concurrency.lockutils [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] Acquiring lock "refresh_cache-4bc613db-af56-48b4-8c24-7f44428d8b4f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.266491] env[62619]: DEBUG oslo_concurrency.lockutils [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] Acquired lock "refresh_cache-4bc613db-af56-48b4-8c24-7f44428d8b4f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 947.266716] env[62619]: DEBUG nova.network.neutron [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Refreshing network info cache for port eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 947.268144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.055s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 947.269466] env[62619]: INFO nova.compute.claims [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 947.440325] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365190, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 947.501080] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365187, 'name': CloneVM_Task, 'duration_secs': 1.637725} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.501376] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Created linked-clone VM from snapshot [ 947.502140] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4d77b9f-1dff-4f84-9972-981c5a26cce0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.509048] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Uploading image 9845866c-f4a3-4a1c-a044-56cf34137267 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 947.529068] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating HttpNfcLease lease for exporting VM: (result){ [ 947.529068] env[62619]: value = "vm-290527" [ 947.529068] env[62619]: _type = "VirtualMachine" [ 947.529068] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:478}} [ 947.529415] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ExportVm with opID=oslo.vmware-b48eac4e-35c4-4d42-824a-2d611e2c3f0a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.536722] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lease: (returnval){ [ 947.536722] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fb2eb7-a78b-b8c1-1cc7-58c529e08d28" [ 947.536722] env[62619]: _type = "HttpNfcLease" [ 947.536722] env[62619]: } obtained for exporting VM: (result){ [ 947.536722] env[62619]: value = "vm-290527" [ 947.536722] env[62619]: _type = "VirtualMachine" [ 947.536722] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:481}} [ 947.536993] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the lease: (returnval){ [ 947.536993] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fb2eb7-a78b-b8c1-1cc7-58c529e08d28" [ 947.536993] env[62619]: _type = "HttpNfcLease" [ 947.536993] env[62619]: } to be ready. {{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 947.543864] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 947.543864] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fb2eb7-a78b-b8c1-1cc7-58c529e08d28" [ 947.543864] env[62619]: _type = "HttpNfcLease" [ 947.543864] env[62619]: } is initializing. 
{{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 947.600876] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 947.733759] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-4bc613db-af56-48b4-8c24-7f44428d8b4f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 947.753486] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c71dafe5-0a0b-42be-b9a2-34d16126804b tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 22.561s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 947.769005] env[62619]: DEBUG nova.compute.utils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 947.770526] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 947.770683] env[62619]: DEBUG nova.network.neutron [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 947.831093] env[62619]: DEBUG nova.policy [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1ab4be10d444359a7a3b245ec9b9ea0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c583f4e7b29743aabd3e96f7c53fa04f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 947.831490] env[62619]: DEBUG nova.network.neutron [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 947.939843] env[62619]: DEBUG oslo_vmware.api [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365190, 'name': PowerOnVM_Task, 'duration_secs': 1.052347} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 947.940262] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 947.943463] env[62619]: DEBUG nova.compute.manager [None req-3ad40f7d-623f-4ac1-8ebe-245a184a2376 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 947.944411] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e29642b-958f-4047-a167-2f7458b97f0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 947.983710] env[62619]: DEBUG nova.network.neutron [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 948.044938] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 948.044938] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fb2eb7-a78b-b8c1-1cc7-58c529e08d28" [ 948.044938] env[62619]: _type = "HttpNfcLease" [ 948.044938] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 948.045271] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 948.045271] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52fb2eb7-a78b-b8c1-1cc7-58c529e08d28" [ 948.045271] env[62619]: _type = "HttpNfcLease" [ 948.045271] env[62619]: }. {{(pid=62619) _create_export_vm_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:486}} [ 948.046008] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f53f28f-6874-4970-9c51-d2c48a854650 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.055682] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226b4e2-7822-c096-c04c-75fd0e1945d7/disk-0.vmdk from lease info. 
{{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 948.055946] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Opening URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226b4e2-7822-c096-c04c-75fd0e1945d7/disk-0.vmdk for reading. {{(pid=62619) _create_read_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:99}} [ 948.121908] env[62619]: DEBUG nova.network.neutron [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Successfully created port: c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 948.160492] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-69e24b28-a205-432a-812c-d43590a18845 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.274485] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 948.436255] env[62619]: DEBUG nova.compute.manager [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Received event network-changed-ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 948.437021] env[62619]: DEBUG nova.compute.manager [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Refreshing instance network info cache due to event network-changed-ec268e92-8fac-43c7-b39c-0518ca9a0403. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 948.437021] env[62619]: DEBUG oslo_concurrency.lockutils [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] Acquiring lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 948.437221] env[62619]: DEBUG oslo_concurrency.lockutils [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] Acquired lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.437537] env[62619]: DEBUG nova.network.neutron [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Refreshing network info cache for port ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 948.486119] env[62619]: DEBUG oslo_concurrency.lockutils [req-48058f9f-7834-430c-9682-c42535e52e9d req-dc64231e-691b-4e55-8bf7-04cfda19a30f service nova] Releasing lock "refresh_cache-4bc613db-af56-48b4-8c24-7f44428d8b4f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 948.486791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-4bc613db-af56-48b4-8c24-7f44428d8b4f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 948.486954] env[62619]: DEBUG nova.network.neutron [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 948.491526] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf40e4cb-abb5-419f-8dfb-6981d8ad729b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.499571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7e2a70e-0998-4c8a-81aa-0d70515f942f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.530787] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a572a7b1-fbaf-4214-a14e-eb3421563416 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.538657] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a187e1c4-f85a-4687-bc80-a3e0c9a5639b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 948.554634] env[62619]: DEBUG nova.compute.provider_tree [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: 
{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.039928] env[62619]: DEBUG nova.network.neutron [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 949.082476] env[62619]: ERROR nova.scheduler.client.report [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [req-c4c310ae-9681-492b-bfbd-e9037a6505d8] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-c4c310ae-9681-492b-bfbd-e9037a6505d8"}]} [ 949.104524] env[62619]: DEBUG nova.scheduler.client.report [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 949.126294] env[62619]: DEBUG nova.scheduler.client.report [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 949.126682] env[62619]: DEBUG nova.compute.provider_tree [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.147865] env[62619]: DEBUG nova.scheduler.client.report [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 949.170052] env[62619]: DEBUG nova.scheduler.client.report [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 949.212992] env[62619]: DEBUG nova.network.neutron [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Updating instance_info_cache with network_info: [{"id": "eaa83ebe-4c4d-40e5-9883-5f85c6f71217", "address": "fa:16:3e:57:52:5e", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaa83ebe-4c", "ovs_interfaceid": "eaa83ebe-4c4d-40e5-9883-5f85c6f71217", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.279259] env[62619]: DEBUG nova.network.neutron [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updated VIF entry in instance network info cache for port ec268e92-8fac-43c7-b39c-0518ca9a0403. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 949.279691] env[62619]: DEBUG nova.network.neutron [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [{"id": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "address": "fa:16:3e:2b:fd:42", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec268e92-8f", "ovs_interfaceid": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 949.289178] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 949.310299] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 949.310589] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 949.310749] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 949.310931] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 949.311225] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 949.311225] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 949.311450] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 949.311631] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 949.311803] 
env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 949.311969] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 949.312202] env[62619]: DEBUG nova.virt.hardware [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 949.313524] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce363d4a-aa83-4c7b-b8c9-2e4af0acd1ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.325163] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a82bf59-298b-4887-9392-bfc39cc3cb3e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.380768] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b772fc97-3618-4e05-8e13-a6ef8c578575 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.388550] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7938229d-adff-44db-a6c7-13b50acf30dd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.421578] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d20bca97-4e8f-4090-be11-d3c57912bb18 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.431781] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e24cfc8-abc5-41ea-9ca9-11926927bc56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.451033] env[62619]: DEBUG nova.compute.provider_tree [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 949.563869] env[62619]: DEBUG nova.compute.manager [req-acae98b8-9fb4-413b-b87b-93946908edb3 req-aa8877d2-7a14-4b13-bfb9-4219b1b51669 service nova] [instance: 
04e1e5ff-6385-4c3d-a226-355a171f7de0] Received event network-vif-plugged-c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 949.564119] env[62619]: DEBUG oslo_concurrency.lockutils [req-acae98b8-9fb4-413b-b87b-93946908edb3 req-aa8877d2-7a14-4b13-bfb9-4219b1b51669 service nova] Acquiring lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 949.564803] env[62619]: DEBUG oslo_concurrency.lockutils [req-acae98b8-9fb4-413b-b87b-93946908edb3 req-aa8877d2-7a14-4b13-bfb9-4219b1b51669 service nova] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 949.565048] env[62619]: DEBUG oslo_concurrency.lockutils [req-acae98b8-9fb4-413b-b87b-93946908edb3 req-aa8877d2-7a14-4b13-bfb9-4219b1b51669 service nova] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 949.565245] env[62619]: DEBUG nova.compute.manager [req-acae98b8-9fb4-413b-b87b-93946908edb3 req-aa8877d2-7a14-4b13-bfb9-4219b1b51669 service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] No waiting events found dispatching network-vif-plugged-c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 949.565508] env[62619]: WARNING nova.compute.manager [req-acae98b8-9fb4-413b-b87b-93946908edb3 req-aa8877d2-7a14-4b13-bfb9-4219b1b51669 service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Received unexpected event network-vif-plugged-c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 for instance with vm_state building and task_state spawning. 
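
The 409 at [ 949.082476] and the later successful update at [ 949.985607] (provider generation moving from 110 to 111) show Placement's optimistic concurrency scheme: every inventory PUT carries the resource provider generation the client last saw, a concurrent change by another claim makes the PUT fail with placement.concurrent_update, and the report client then refreshes the provider and retries. The sketch below is a minimal illustration of that retry loop, not Nova's scheduler report client; PLACEMENT_URL, HEADERS and the bare requests session are assumptions for illustration only (a real client also sends auth headers).

import requests

PLACEMENT_URL = "http://placement.example/placement"   # hypothetical endpoint
HEADERS = {"OpenStack-API-Version": "placement 1.26"}  # plus auth headers in reality

def set_inventory(session, provider_uuid, inventories, max_retries=3):
    """PUT inventory for a resource provider, retrying on generation conflicts."""
    for _ in range(max_retries):
        # Read the provider's current generation before each attempt.
        rp = session.get(
            f"{PLACEMENT_URL}/resource_providers/{provider_uuid}",
            headers=HEADERS).json()
        payload = {"resource_provider_generation": rp["generation"],
                   "inventories": inventories}
        resp = session.put(
            f"{PLACEMENT_URL}/resource_providers/{provider_uuid}/inventories",
            json=payload, headers=HEADERS)
        if resp.status_code != 409:
            resp.raise_for_status()
            return resp.json()  # response carries the bumped generation
        # 409 placement.concurrent_update: another writer bumped the provider
        # generation (e.g. a parallel instance claim); loop and retry with the
        # freshly read generation.
    raise RuntimeError("inventory update kept conflicting; giving up")

In the trace above the retried update succeeds on the next attempt and the provider generation advances from 110 to 111 before the claim for instance 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e completes.
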
[ 949.715695] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-4bc613db-af56-48b4-8c24-7f44428d8b4f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.716011] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Instance network_info: |[{"id": "eaa83ebe-4c4d-40e5-9883-5f85c6f71217", "address": "fa:16:3e:57:52:5e", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapeaa83ebe-4c", "ovs_interfaceid": "eaa83ebe-4c4d-40e5-9883-5f85c6f71217", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 949.716571] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:57:52:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57c65f87-60fd-4882-ab30-31db49131b46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'eaa83ebe-4c4d-40e5-9883-5f85c6f71217', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 949.724650] env[62619]: DEBUG oslo.service.loopingcall [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 949.725289] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 949.725624] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8f8fd46b-074a-4da3-b11e-f30cafb72b7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 949.745971] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 949.745971] env[62619]: value = "task-1365192" [ 949.745971] env[62619]: _type = "Task" [ 949.745971] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 949.753906] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365192, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 949.782788] env[62619]: DEBUG oslo_concurrency.lockutils [req-99d4e9d7-33f4-4070-9aac-f87680a07ae8 req-f8956290-e470-4eb3-8ef8-6524c237d308 service nova] Releasing lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 949.985607] env[62619]: DEBUG nova.scheduler.client.report [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 110 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 949.985888] env[62619]: DEBUG nova.compute.provider_tree [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 110 to 111 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 949.986084] env[62619]: DEBUG nova.compute.provider_tree [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 950.101638] env[62619]: DEBUG nova.network.neutron [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 
04e1e5ff-6385-4c3d-a226-355a171f7de0] Successfully updated port: c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 950.126924] env[62619]: DEBUG nova.compute.manager [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Received event network-changed-c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 950.127288] env[62619]: DEBUG nova.compute.manager [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Refreshing instance network info cache due to event network-changed-c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 950.127812] env[62619]: DEBUG oslo_concurrency.lockutils [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] Acquiring lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.128138] env[62619]: DEBUG oslo_concurrency.lockutils [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] Acquired lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.128609] env[62619]: DEBUG nova.network.neutron [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Refreshing network info cache for port c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 950.258482] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365192, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.491590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.223s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.492223] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 950.495808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.921s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.496042] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 950.498040] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.233s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 950.499544] env[62619]: INFO nova.compute.claims [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 950.523920] env[62619]: INFO nova.scheduler.client.report [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocations for instance 11542a9b-6556-4b4b-88fe-26c6be2969f6 [ 950.606131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.665156] env[62619]: DEBUG nova.network.neutron [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 950.759780] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365192, 'name': CreateVM_Task, 'duration_secs': 0.632805} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 950.760096] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 950.760852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.761065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.761500] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 950.761790] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-abbd797c-dd52-4d06-9d21-6957cf05ed8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 950.766764] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 950.766764] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52762843-3efc-2ee9-6d00-56af2b4984e5" [ 950.766764] env[62619]: _type = "Task" [ 950.766764] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 950.775821] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52762843-3efc-2ee9-6d00-56af2b4984e5, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 950.923919] env[62619]: DEBUG nova.network.neutron [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 950.927939] env[62619]: INFO nova.compute.manager [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Rescuing [ 950.928255] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 950.928433] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 950.928598] env[62619]: DEBUG nova.network.neutron [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 950.997777] env[62619]: DEBUG nova.compute.utils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 950.999747] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 950.999948] env[62619]: DEBUG nova.network.neutron [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 951.031535] env[62619]: DEBUG oslo_concurrency.lockutils [None req-26700eba-696d-413e-b358-50aa51fb8c63 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "11542a9b-6556-4b4b-88fe-26c6be2969f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.441s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 951.039754] env[62619]: DEBUG nova.policy [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1e90a23c6444273bc10051f3227804c', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '998daea123aa48b2816d1cbe9e662950', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 951.278606] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52762843-3efc-2ee9-6d00-56af2b4984e5, 'name': SearchDatastore_Task, 'duration_secs': 0.012492} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.278947] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.278947] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 951.279154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 951.279307] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.279556] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 951.279852] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6c6ecc5a-6236-4f9e-9ceb-1886bca19a41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.288738] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 951.288923] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 951.289682] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6d253f82-d8ff-4d3c-b45b-c50e3bdf3308 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.295809] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 951.295809] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f6038a-b9fe-570b-878d-a0861554fa31" [ 951.295809] env[62619]: _type = "Task" [ 951.295809] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.303326] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f6038a-b9fe-570b-878d-a0861554fa31, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.309932] env[62619]: DEBUG nova.network.neutron [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Successfully created port: e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 951.427093] env[62619]: DEBUG oslo_concurrency.lockutils [req-b0c3d112-284e-4d59-92e0-3382354ab882 req-c3b20503-3885-469f-af28-40a7272fb49b service nova] Releasing lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 951.427641] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 951.427856] env[62619]: DEBUG nova.network.neutron [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 951.502666] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 951.690367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8183d52b-6999-4ca3-ba58-2a510e7a0ef2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.698437] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c08dcca6-f9cb-4cad-8f9a-cb9c85d4da5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.731780] env[62619]: DEBUG nova.network.neutron [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [{"id": "53678dcd-6f40-4645-961d-a9c2608eeba8", "address": "fa:16:3e:bb:46:22", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53678dcd-6f", "ovs_interfaceid": "53678dcd-6f40-4645-961d-a9c2608eeba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 951.734191] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6ff623a-3f3d-44e2-8431-010d7ce18379 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.743174] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fddff149-4a1c-459f-8617-ab1a04a43186 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.763866] env[62619]: DEBUG nova.compute.provider_tree [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 951.806877] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f6038a-b9fe-570b-878d-a0861554fa31, 'name': SearchDatastore_Task, 'duration_secs': 0.010159} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 951.807784] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3b56999a-6008-4911-9672-effde172921b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 951.813401] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 951.813401] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527c646a-45c4-db52-1f52-be1b086c55e4" [ 951.813401] env[62619]: _type = "Task" [ 951.813401] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 951.821618] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527c646a-45c4-db52-1f52-be1b086c55e4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 951.973877] env[62619]: DEBUG nova.network.neutron [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 952.114694] env[62619]: DEBUG nova.network.neutron [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance_info_cache with network_info: [{"id": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "address": "fa:16:3e:51:5c:ce", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f7c57e-c3", "ovs_interfaceid": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 952.234890] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock 
"refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.267225] env[62619]: DEBUG nova.scheduler.client.report [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 952.324626] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527c646a-45c4-db52-1f52-be1b086c55e4, 'name': SearchDatastore_Task, 'duration_secs': 0.012105} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 952.324922] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.325239] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 4bc613db-af56-48b4-8c24-7f44428d8b4f/4bc613db-af56-48b4-8c24-7f44428d8b4f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 952.325518] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7f448593-0cd0-4cc4-9872-9e574fe5776e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.332727] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 952.332727] env[62619]: value = "task-1365193" [ 952.332727] env[62619]: _type = "Task" [ 952.332727] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.340912] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365193, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.514690] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 952.540254] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 952.540623] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 952.540841] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 952.541127] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 952.541342] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 952.541566] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 952.541878] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 952.542191] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 
tempest-ImagesTestJSON-13558914-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 952.542386] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 952.542632] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 952.542874] env[62619]: DEBUG nova.virt.hardware [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 952.544117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22217667-cf1f-474b-b9a5-56c2cc1e3358 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.555147] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13d5b84b-0751-4ea5-8f2d-4d0d81b6bb07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.617949] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 952.618424] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Instance network_info: |[{"id": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "address": "fa:16:3e:51:5c:ce", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f7c57e-c3", "ovs_interfaceid": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) 
_allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 952.618910] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:51:5c:ce', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 952.627189] env[62619]: DEBUG oslo.service.loopingcall [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 952.627484] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 952.627730] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-3be84e1d-ee0a-4519-bc0d-c95f2727689f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.649740] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 952.649740] env[62619]: value = "task-1365194" [ 952.649740] env[62619]: _type = "Task" [ 952.649740] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.658063] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365194, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.724422] env[62619]: DEBUG nova.compute.manager [req-a9db10b4-c7f9-4a25-bae7-4a597c776e3a req-120ffe0c-ddc7-4297-814a-014c9c18572e service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Received event network-vif-plugged-e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 952.724614] env[62619]: DEBUG oslo_concurrency.lockutils [req-a9db10b4-c7f9-4a25-bae7-4a597c776e3a req-120ffe0c-ddc7-4297-814a-014c9c18572e service nova] Acquiring lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 952.724852] env[62619]: DEBUG oslo_concurrency.lockutils [req-a9db10b4-c7f9-4a25-bae7-4a597c776e3a req-120ffe0c-ddc7-4297-814a-014c9c18572e service nova] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.725126] env[62619]: DEBUG oslo_concurrency.lockutils [req-a9db10b4-c7f9-4a25-bae7-4a597c776e3a req-120ffe0c-ddc7-4297-814a-014c9c18572e service nova] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.725301] env[62619]: DEBUG nova.compute.manager [req-a9db10b4-c7f9-4a25-bae7-4a597c776e3a req-120ffe0c-ddc7-4297-814a-014c9c18572e service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] No waiting events found dispatching network-vif-plugged-e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 952.725572] env[62619]: WARNING nova.compute.manager [req-a9db10b4-c7f9-4a25-bae7-4a597c776e3a req-120ffe0c-ddc7-4297-814a-014c9c18572e service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Received unexpected event network-vif-plugged-e14ea18e-2f1a-4153-9f37-ab23790db41f for instance with vm_state building and task_state spawning. 
[ 952.766715] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 952.767027] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1402badf-94a7-48a9-aa8e-ea4437c0f801 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 952.771942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.274s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 952.772549] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 952.779027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.112s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 952.779027] env[62619]: DEBUG nova.objects.instance [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lazy-loading 'resources' on Instance uuid 1c1b7717-30a9-40c9-913f-6d65a619b94a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 952.779027] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 952.779027] env[62619]: value = "task-1365195" [ 952.779027] env[62619]: _type = "Task" [ 952.779027] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 952.787679] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365195, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 952.844512] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365193, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.161221] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365194, 'name': CreateVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.280708] env[62619]: DEBUG nova.compute.utils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 953.285264] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 953.285537] env[62619]: DEBUG nova.network.neutron [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 953.298059] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365195, 'name': PowerOffVM_Task, 'duration_secs': 0.318688} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.298865] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 953.299540] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fe18e9a9-02b2-477c-8434-5fcca01fc646 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.319871] env[62619]: DEBUG nova.network.neutron [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Successfully updated port: e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 953.324201] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bc5ea8d-9b0e-44f3-aa4c-4344c5ef031a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.329170] env[62619]: DEBUG nova.policy [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 953.345225] env[62619]: DEBUG 
nova.compute.manager [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Received event network-changed-e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 953.345384] env[62619]: DEBUG nova.compute.manager [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Refreshing instance network info cache due to event network-changed-e14ea18e-2f1a-4153-9f37-ab23790db41f. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 953.345614] env[62619]: DEBUG oslo_concurrency.lockutils [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] Acquiring lock "refresh_cache-1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.345758] env[62619]: DEBUG oslo_concurrency.lockutils [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] Acquired lock "refresh_cache-1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.345929] env[62619]: DEBUG nova.network.neutron [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Refreshing network info cache for port e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 953.352757] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365193, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.595372} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.353036] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 4bc613db-af56-48b4-8c24-7f44428d8b4f/4bc613db-af56-48b4-8c24-7f44428d8b4f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 953.353288] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 953.353642] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-925f459f-a724-4f2f-ae53-5afd46f79023 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.365282] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 953.365282] env[62619]: value = "task-1365196" [ 953.365282] env[62619]: _type = "Task" [ 953.365282] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.367282] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 953.368105] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-9e2dca69-3883-4d33-ba04-262c10e9192f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.382774] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 953.382774] env[62619]: value = "task-1365197" [ 953.382774] env[62619]: _type = "Task" [ 953.382774] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.385743] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365196, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.397423] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 953.397728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 953.398027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.398198] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.398399] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.398683] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dac48b63-9fba-4ff2-be94-c0e1170e8a14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.411579] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.411820] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 953.412669] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-562e086a-d07b-4d56-b5f5-2f9a1c43b275 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.421638] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 953.421638] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527c4906-a435-70b7-0263-0e699a6285bd" [ 953.421638] env[62619]: _type = "Task" [ 953.421638] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.430299] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527c4906-a435-70b7-0263-0e699a6285bd, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.498899] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c11783a7-afb1-40a3-b170-dbbb28a53b1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.506814] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73620fd3-2bb1-4456-8aa2-424aea6f4c13 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.536207] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b642235-3f77-468b-958a-10cab7644c8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.543677] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffecbe17-4e56-461f-8842-bf004c8a8df2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.556909] env[62619]: DEBUG nova.compute.provider_tree [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 953.661300] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365194, 'name': CreateVM_Task, 'duration_secs': 0.536386} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.661518] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 953.662258] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.662448] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 953.662763] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 953.663038] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cd446b74-48fb-44fe-82b8-cb64bd59fbdb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.667773] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 953.667773] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528541f0-66be-a3ef-9e20-74f81808335a" [ 953.667773] env[62619]: _type = "Task" [ 953.667773] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.672808] env[62619]: DEBUG nova.network.neutron [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Successfully created port: 691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 953.679861] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528541f0-66be-a3ef-9e20-74f81808335a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.786053] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 953.830363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "refresh_cache-1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 953.881467] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365196, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.068476} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.882688] env[62619]: DEBUG nova.network.neutron [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 953.885624] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 953.886894] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aeef7ad-6a0d-454e-a6f3-1995895dab7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.924469] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Reconfiguring VM instance instance-0000005a to attach disk [datastore1] 4bc613db-af56-48b4-8c24-7f44428d8b4f/4bc613db-af56-48b4-8c24-7f44428d8b4f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 953.925388] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5e85ae6f-8dfd-4fc2-80a1-6869059febad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.966071] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527c4906-a435-70b7-0263-0e699a6285bd, 'name': SearchDatastore_Task, 'duration_secs': 0.011731} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.968144] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 953.968144] env[62619]: value = "task-1365198" [ 953.968144] env[62619]: _type = "Task" [ 953.968144] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.968417] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a48a5ace-bccd-484e-b6f2-2aa30090b4f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.978467] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 953.978467] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5201483a-0830-c15e-a5e2-2b3f3166cc49" [ 953.978467] env[62619]: _type = "Task" [ 953.978467] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.982925] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365198, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.993544] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5201483a-0830-c15e-a5e2-2b3f3166cc49, 'name': SearchDatastore_Task, 'duration_secs': 0.012256} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 953.993871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 953.994178] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copying virtual disk from [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. {{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 953.994532] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-efd1b620-da0d-452f-99a1-da48ebb497dc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.001477] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 954.001477] env[62619]: value = "task-1365199" [ 954.001477] env[62619]: _type = "Task" [ 954.001477] env[62619]: } to complete. 
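The wait_for_task / _poll_task entries above all follow one pattern: submit a vCenter task, then poll its state until it reports success or an error. A minimal sketch of that polling loop, using hypothetical names (poll_task, get_task_info) rather than the oslo.vmware API:

```python
# Illustrative sketch of the poll-until-done pattern behind the
# "Waiting for the task ... to complete" / "progress is N%" entries above.
# Names are hypothetical; this is not the oslo.vmware implementation.
import time


class TaskFailed(Exception):
    """Raised when the remote task reports an error state."""


def poll_task(get_task_info, interval=0.5, timeout=300.0):
    """Poll a remote task until it reaches a terminal state.

    get_task_info: callable returning a dict such as
        {'state': 'running', 'progress': 42},
        {'state': 'success'} or {'state': 'error', 'error': '...'}.
    """
    deadline = time.monotonic() + timeout
    while True:
        info = get_task_info()
        state = info.get('state')
        if state == 'success':
            return info
        if state == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        if time.monotonic() > deadline:
            raise TimeoutError('task did not complete in %.0fs' % timeout)
        # Comparable to the "progress is N%" DEBUG lines above.
        print('progress is %s%%' % info.get('progress', 0))
        time.sleep(interval)
```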
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.005598] env[62619]: DEBUG nova.network.neutron [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 954.011972] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365199, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.091876] env[62619]: DEBUG nova.scheduler.client.report [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 111 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 954.092256] env[62619]: DEBUG nova.compute.provider_tree [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 111 to 112 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 954.092502] env[62619]: DEBUG nova.compute.provider_tree [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 954.179870] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528541f0-66be-a3ef-9e20-74f81808335a, 'name': SearchDatastore_Task, 'duration_secs': 0.010198} completed successfully. 
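The set_inventory_for_provider entry above reports totals, reserved amounts, and allocation ratios per resource class. Schedulable capacity follows from (total - reserved) * allocation_ratio; a small worked example using the logged figures (the helper name is illustrative):

```python
# capacity = (total - reserved) * allocation_ratio, with the values from the
# inventory update for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 above.
inventory = {
    'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
    'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
    'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
}


def schedulable_capacity(inv):
    return {
        rc: (rec['total'] - rec['reserved']) * rec['allocation_ratio']
        for rc, rec in inv.items()
    }


print(schedulable_capacity(inventory))
# {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}
```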
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.180272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.180539] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 954.180870] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 954.181193] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.181463] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.181790] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f3c156cc-5f7d-43e3-a089-c6a7ccb81718 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.191205] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.191477] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Folder [datastore1] devstack-image-cache_base created. 
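The lock / MakeDirectory / SearchDatastore_Task / CopyVirtualDisk_Task sequence above is a cache-then-copy flow: fetch the image into a shared image cache once, under a per-image lock, then copy it into the instance's directory. A minimal local-filesystem sketch of that shape, assuming a hypothetical fetch_image helper standing in for the datastore operations:

```python
# Illustrative sketch only: real Nova performs these steps against the
# datastore via oslo.vmware, not the local filesystem.
import shutil
import threading
from pathlib import Path

_cache_locks: dict[str, threading.Lock] = {}
_registry_lock = threading.Lock()


def _lock_for(key: str) -> threading.Lock:
    # One lock per cached image, so concurrent builds of the same image
    # serialize (the "Acquiring/Acquired lock [datastore1] ..." lines).
    with _registry_lock:
        return _cache_locks.setdefault(key, threading.Lock())


def ensure_instance_disk(image_id: str, cache_dir: Path, instance_dir: Path,
                         fetch_image) -> Path:
    cached = cache_dir / image_id / f"{image_id}.vmdk"
    with _lock_for(str(cached)):
        cache_dir.mkdir(parents=True, exist_ok=True)   # "Creating directory ..."
        if not cached.exists():                        # SearchDatastore_Task analogue
            cached.parent.mkdir(parents=True, exist_ok=True)
            fetch_image(image_id, cached)              # download once, reuse afterwards
    instance_dir.mkdir(parents=True, exist_ok=True)
    target = instance_dir / f"{instance_dir.name}.vmdk"
    shutil.copyfile(cached, target)                    # CopyVirtualDisk_Task analogue
    return target
```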
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 954.192347] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-482cf332-71b8-4a10-ba85-426d8c5ae34b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.197768] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 954.197768] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b71316-a3c4-530a-8c65-73f493669293" [ 954.197768] env[62619]: _type = "Task" [ 954.197768] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.207299] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b71316-a3c4-530a-8c65-73f493669293, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.482444] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365198, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.525900] env[62619]: DEBUG oslo_concurrency.lockutils [req-5b77c78f-02a3-4e5f-821b-25f7784ef8f5 req-8de3d9da-47fb-461a-a868-38f728bf7fa6 service nova] Releasing lock "refresh_cache-1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 954.526924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "refresh_cache-1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 954.526924] env[62619]: DEBUG nova.network.neutron [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 954.535601] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365199, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.499398} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.536340] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copied virtual disk from [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. [ 954.538208] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-639a9645-4bc1-41c9-aee1-a471bd0566b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.595972] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Reconfiguring VM instance instance-00000057 to attach disk [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 954.596659] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-df3b66b2-5387-4238-97e7-6aab98bdb8e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.610175] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.833s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 954.612396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 7.012s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 954.612635] env[62619]: DEBUG nova.objects.instance [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lazy-loading 'resources' on Instance uuid 8c07697f-0e20-4ec5-88ec-ec4420906313 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 954.619474] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 954.619474] env[62619]: value = "task-1365200" [ 954.619474] env[62619]: _type = "Task" [ 954.619474] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.629838] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365200, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.635957] env[62619]: INFO nova.scheduler.client.report [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted allocations for instance 1c1b7717-30a9-40c9-913f-6d65a619b94a [ 954.708446] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b71316-a3c4-530a-8c65-73f493669293, 'name': SearchDatastore_Task, 'duration_secs': 0.014199} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.709386] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-53d316a6-8637-4727-be91-e24797cc5f08 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.715148] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 954.715148] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5279e79b-b110-7566-6a91-04ba59dcdb94" [ 954.715148] env[62619]: _type = "Task" [ 954.715148] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 954.723581] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5279e79b-b110-7566-6a91-04ba59dcdb94, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.745343] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.745694] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 954.745898] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 954.797530] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 954.824341] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 954.824627] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 954.824799] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 954.824989] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 954.825162] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 954.825313] env[62619]: DEBUG 
nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 954.825527] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 954.825689] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 954.825858] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 954.826034] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 954.826218] env[62619]: DEBUG nova.virt.hardware [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 954.827412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3c366e-3b6a-484d-88aa-c8ddbf311581 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.835420] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcc81163-693e-4be2-a583-806c7144ec5c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.981927] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365198, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.062969] env[62619]: DEBUG nova.network.neutron [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Instance cache missing network info. 
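The nova.virt.hardware entries above enumerate candidate CPU topologies for a 1-vCPU flavor under 65536-wide limits and arrive at a single 1:1:1 topology. A simplified sketch of that enumeration (Nova additionally applies flavor and image preferences when sorting the results):

```python
# Every sockets * cores * threads factorization of the vCPU count that fits
# the per-dimension maxima, in the spirit of _get_possible_cpu_topologies.
from dataclasses import dataclass


@dataclass(frozen=True)
class Topology:
    sockets: int
    cores: int
    threads: int


def possible_topologies(vcpus, max_sockets=65536, max_cores=65536,
                        max_threads=65536):
    found = []
    for sockets in range(1, min(vcpus, max_sockets) + 1):
        if vcpus % sockets:
            continue
        per_socket = vcpus // sockets
        for cores in range(1, min(per_socket, max_cores) + 1):
            if per_socket % cores:
                continue
            threads = per_socket // cores
            if threads <= max_threads:
                found.append(Topology(sockets, cores, threads))
    return found


print(possible_topologies(1))   # [Topology(sockets=1, cores=1, threads=1)]
```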
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 955.091427] env[62619]: DEBUG nova.compute.manager [req-627109b1-aa34-443f-9523-5c0d9dedd43a req-6a89135f-7d3c-4473-b4c1-fe4fc1482da7 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Received event network-vif-plugged-691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 955.091621] env[62619]: DEBUG oslo_concurrency.lockutils [req-627109b1-aa34-443f-9523-5c0d9dedd43a req-6a89135f-7d3c-4473-b4c1-fe4fc1482da7 service nova] Acquiring lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 955.091856] env[62619]: DEBUG oslo_concurrency.lockutils [req-627109b1-aa34-443f-9523-5c0d9dedd43a req-6a89135f-7d3c-4473-b4c1-fe4fc1482da7 service nova] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 955.092068] env[62619]: DEBUG oslo_concurrency.lockutils [req-627109b1-aa34-443f-9523-5c0d9dedd43a req-6a89135f-7d3c-4473-b4c1-fe4fc1482da7 service nova] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.092276] env[62619]: DEBUG nova.compute.manager [req-627109b1-aa34-443f-9523-5c0d9dedd43a req-6a89135f-7d3c-4473-b4c1-fe4fc1482da7 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] No waiting events found dispatching network-vif-plugged-691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 955.092485] env[62619]: WARNING nova.compute.manager [req-627109b1-aa34-443f-9523-5c0d9dedd43a req-6a89135f-7d3c-4473-b4c1-fe4fc1482da7 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Received unexpected event network-vif-plugged-691521f9-74b1-4539-ac75-fd476fb64893 for instance with vm_state building and task_state spawning. [ 955.130575] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365200, 'name': ReconfigVM_Task} progress is 99%. 
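The network-vif-plugged handling above shows the external-event handshake: the build path registers the events it expects, and the event handler either completes a waiter or logs the event as unexpected. A minimal sketch with illustrative class and method names:

```python
# Not Nova's InstanceEvents implementation; just the registration/pop shape
# visible in the "-events" lock and "Received unexpected event" lines above.
import threading
from collections import defaultdict


class InstanceEvents:
    def __init__(self):
        self._lock = threading.Lock()
        self._waiters = defaultdict(dict)   # instance uuid -> {event: Event}

    def expect(self, instance_uuid, event_name):
        waiter = threading.Event()
        with self._lock:                    # the "<uuid>-events" lock above
            self._waiters[instance_uuid][event_name] = waiter
        return waiter

    def pop_event(self, instance_uuid, event_name):
        with self._lock:
            return self._waiters[instance_uuid].pop(event_name, None)


def handle_external_event(events, instance_uuid, event_name):
    waiter = events.pop_event(instance_uuid, event_name)
    if waiter is None:
        # Corresponds to the WARNING "Received unexpected event ..." line.
        print(f"unexpected event {event_name} for {instance_uuid}")
    else:
        waiter.set()                        # unblocks whoever called expect()
```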
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.146250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-83bbf0ea-6bb5-4e14-910b-68df4693a44b tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1c1b7717-30a9-40c9-913f-6d65a619b94a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 12.469s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 955.213922] env[62619]: DEBUG nova.network.neutron [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Updating instance_info_cache with network_info: [{"id": "e14ea18e-2f1a-4153-9f37-ab23790db41f", "address": "fa:16:3e:8d:13:af", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14ea18e-2f", "ovs_interfaceid": "e14ea18e-2f1a-4153-9f37-ab23790db41f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.227016] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5279e79b-b110-7566-6a91-04ba59dcdb94, 'name': SearchDatastore_Task, 'duration_secs': 0.010297} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.230334] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.230696] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0/04e1e5ff-6385-4c3d-a226-355a171f7de0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 955.231156] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-40fde7c8-ed2a-41cb-a500-7947e18df2e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.238425] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 955.238425] env[62619]: value = "task-1365201" [ 955.238425] env[62619]: _type = "Task" [ 955.238425] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.244471] env[62619]: DEBUG nova.network.neutron [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Successfully updated port: 691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 955.253964] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365201, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.299385] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b05e7794-af22-4aec-be8d-db0a67b02eb8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.307301] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6e3820-ca85-4018-b189-7fbd882c3e3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.340261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3ac76a00-510e-4813-98f9-a4a0b224bd38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.348541] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92ffa472-0bd6-40b2-bdf4-9dfec4108bdd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.363902] env[62619]: DEBUG nova.compute.provider_tree [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.482595] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365198, 'name': ReconfigVM_Task, 'duration_secs': 1.414423} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.482929] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Reconfigured VM instance instance-0000005a to attach disk [datastore1] 4bc613db-af56-48b4-8c24-7f44428d8b4f/4bc613db-af56-48b4-8c24-7f44428d8b4f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.483745] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-11efe6fd-184a-441b-98f8-e26d97c6d12d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.490825] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 955.490825] env[62619]: value = "task-1365202" [ 955.490825] env[62619]: _type = "Task" [ 955.490825] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.501588] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365202, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.630810] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365200, 'name': ReconfigVM_Task, 'duration_secs': 0.550158} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 955.631113] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Reconfigured VM instance instance-00000057 to attach disk [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 955.632251] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec84dc5b-cd97-4f10-a50b-7d20cd4449f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.658712] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1618d17e-56ad-4f60-b402-3b057a2a7e8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.675365] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 955.675365] env[62619]: value = "task-1365203" [ 955.675365] env[62619]: _type = "Task" [ 955.675365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.684329] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365203, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.721829] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "refresh_cache-1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 955.721829] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Instance network_info: |[{"id": "e14ea18e-2f1a-4153-9f37-ab23790db41f", "address": "fa:16:3e:8d:13:af", "network": {"id": "17acf28e-3c95-4a28-a211-8598f95532aa", "bridge": "br-int", "label": "tempest-ImagesTestJSON-1059820077-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "998daea123aa48b2816d1cbe9e662950", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c", "external-id": "nsx-vlan-transportzone-772", "segmentation_id": 772, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape14ea18e-2f", "ovs_interfaceid": "e14ea18e-2f1a-4153-9f37-ab23790db41f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 955.722306] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8d:13:af', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e14ea18e-2f1a-4153-9f37-ab23790db41f', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 955.731689] env[62619]: DEBUG oslo.service.loopingcall [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
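The "Instance VIF info" entry above is derived from the cached network_info shown earlier (bridge, MAC address, NSX logical switch id, port id). A small sketch of that mapping, using the logged values as the example input (the function name is illustrative):

```python
# Select the fields that appear in the logged VIF-info dict from a
# network_info VIF entry like the one cached for instance 1cbb03c7-... .
def vif_info_from_network_info(vif, vif_model='vmxnet3'):
    details = vif.get('details', {})
    return {
        'network_name': vif['network']['bridge'],
        'mac_address': vif['address'],
        'network_ref': {
            'type': 'OpaqueNetwork',
            'network-id': details.get('nsx-logical-switch-id'),
            'network-type': 'nsx.LogicalSwitch',
            'use-external-id': True,
        },
        'iface_id': vif['id'],
        'vif_model': vif_model,
    }


example_vif = {
    'id': 'e14ea18e-2f1a-4153-9f37-ab23790db41f',
    'address': 'fa:16:3e:8d:13:af',
    'network': {'bridge': 'br-int'},
    'details': {'nsx-logical-switch-id': 'c7c7f16b-a2e3-4d1f-9b7e-a44a9ebb589c'},
}
print(vif_info_from_network_info(example_vif))
```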
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.732118] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 955.732484] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0f26d0f3-69a0-4fb6-8d6f-aa4fd6a95b38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.752827] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-f3638527-351b-4ea8-bf7d-6e90bc68abe4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 955.753058] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-f3638527-351b-4ea8-bf7d-6e90bc68abe4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 955.753306] env[62619]: DEBUG nova.network.neutron [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 955.757444] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 955.757444] env[62619]: value = "task-1365204" [ 955.757444] env[62619]: _type = "Task" [ 955.757444] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 955.765256] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365201, 'name': CopyVirtualDisk_Task} progress is 51%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.772064] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365204, 'name': CreateVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 955.866878] env[62619]: DEBUG nova.scheduler.client.report [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 956.001607] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365202, 'name': Rename_Task, 'duration_secs': 0.375974} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.001897] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.002268] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-569528ff-2645-4105-ac3f-49a812e67639 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.009171] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 956.009171] env[62619]: value = "task-1365205" [ 956.009171] env[62619]: _type = "Task" [ 956.009171] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.018313] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365205, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.185367] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365203, 'name': ReconfigVM_Task, 'duration_secs': 0.230115} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.185661] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 956.185978] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-29aad976-0172-4fc1-b9e3-1942fb703f6d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.192666] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 956.192666] env[62619]: value = "task-1365206" [ 956.192666] env[62619]: _type = "Task" [ 956.192666] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.200611] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365206, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.257955] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365201, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.60563} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.258329] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 956.260240] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0/04e1e5ff-6385-4c3d-a226-355a171f7de0.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 956.260511] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 956.260760] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.261077] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-8a3b2dd6-7962-4902-a975-97bf48929025 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.267150] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.267721] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.268517] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.268773] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.268943] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.269085] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... 
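The run_periodic_tasks entries above cycle through the registered compute-manager tasks, and _reclaim_queued_deletes short-circuits when its interval is disabled. A plain-Python sketch of that dispatch pattern (not oslo.service's implementation):

```python
# Each registered task runs on its own spacing; tasks guarded by a disabled
# config value return early, as in "CONF.reclaim_instance_interval <= 0".
import time


class PeriodicRunner:
    def __init__(self):
        self._tasks = []        # [name, spacing, func, last_run]

    def register(self, name, spacing, func):
        self._tasks.append([name, spacing, func, 0.0])

    def run_due_tasks(self):
        now = time.monotonic()
        for task in self._tasks:
            name, spacing, func, last_run = task
            if now - last_run >= spacing:
                print(f"Running periodic task {name}")
                func()
                task[3] = now


def reclaim_queued_deletes(reclaim_instance_interval=0):
    if reclaim_instance_interval <= 0:
        print("CONF.reclaim_instance_interval <= 0, skipping...")
        return
    # ... real work would reclaim soft-deleted instances here ...


runner = PeriodicRunner()
runner.register("_reclaim_queued_deletes", 60.0, reclaim_queued_deletes)
runner.run_due_tasks()
```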
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 956.269235] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 956.272699] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 956.272699] env[62619]: value = "task-1365207" [ 956.272699] env[62619]: _type = "Task" [ 956.272699] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.275377] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365204, 'name': CreateVM_Task, 'duration_secs': 0.327347} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.278565] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 956.279227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.279428] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.279953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 956.280458] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b9206168-aad5-4d1b-b18f-d5a4fab7f0ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.287238] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365207, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.288787] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 956.288787] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522a986c-6b0b-e751-7210-e21b19665939" [ 956.288787] env[62619]: _type = "Task" [ 956.288787] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.298188] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522a986c-6b0b-e751-7210-e21b19665939, 'name': SearchDatastore_Task} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.298915] env[62619]: DEBUG nova.network.neutron [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 956.300824] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.301110] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 956.301360] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 956.301513] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 956.301694] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 956.302020] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6e4f8a43-1fa8-4521-9b8e-9188b995fc3c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.310599] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 956.310743] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 
tempest-ImagesTestJSON-13558914-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 956.311642] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-07301ad1-3123-4c3e-96bd-fa98b4eadcdd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.317632] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 956.317632] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5243f112-b170-c6d9-2c46-6795efd78217" [ 956.317632] env[62619]: _type = "Task" [ 956.317632] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.327728] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5243f112-b170-c6d9-2c46-6795efd78217, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.373450] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.761s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.397870] env[62619]: INFO nova.scheduler.client.report [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleted allocations for instance 8c07697f-0e20-4ec5-88ec-ec4420906313 [ 956.521545] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365205, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.626488] env[62619]: DEBUG nova.network.neutron [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Updating instance_info_cache with network_info: [{"id": "691521f9-74b1-4539-ac75-fd476fb64893", "address": "fa:16:3e:0a:31:4a", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691521f9-74", "ovs_interfaceid": "691521f9-74b1-4539-ac75-fd476fb64893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 956.706450] env[62619]: DEBUG oslo_vmware.api [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365206, 'name': PowerOnVM_Task, 'duration_secs': 0.431943} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.706450] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 956.707763] env[62619]: DEBUG nova.compute.manager [None req-0ecf9255-7da5-4f7c-8817-91e20ad9bc3e tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 956.708709] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bfa5259-c043-4585-809a-581f72643a39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.741279] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226b4e2-7822-c096-c04c-75fd0e1945d7/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 956.742327] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d81edc8f-8429-4854-8b71-977273bb534a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.757306] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226b4e2-7822-c096-c04c-75fd0e1945d7/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 956.757306] env[62619]: ERROR oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Aborting lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226b4e2-7822-c096-c04c-75fd0e1945d7/disk-0.vmdk due to incomplete transfer. [ 956.757306] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseAbort with opID=oslo.vmware-bd171c6b-5f1a-4307-86b0-d5437dc43132 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.765687] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Closed VMDK read handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/5226b4e2-7822-c096-c04c-75fd0e1945d7/disk-0.vmdk. {{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:735}} [ 956.766019] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Uploaded image 9845866c-f4a3-4a1c-a044-56cf34137267 to the Glance image server {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:472}} [ 956.769349] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 956.770148] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-b0bfe1a0-582c-42f8-9817-eb123fb73e40 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.774134] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.774421] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.774657] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.774863] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 956.776215] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2149c088-e641-4b52-af55-5854057c2c25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.787025] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 956.787025] env[62619]: value = "task-1365208" [ 956.787025] env[62619]: _type = "Task" [ 956.787025] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.795602] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c81d0fe-df59-41c5-a406-dae5d236f6de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.805358] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365207, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.065554} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.806516] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 956.807555] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d816638-7d83-4b7d-b1d5-4a420b1af962 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.827745] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365208, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.832094] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb79b7d5-b44a-4ce5-b642-ca26a63d3782 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.851066] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0/04e1e5ff-6385-4c3d-a226-355a171f7de0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 956.851935] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8c0801fb-c665-482f-b884-e670af168f2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.870756] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5243f112-b170-c6d9-2c46-6795efd78217, 'name': SearchDatastore_Task, 'duration_secs': 0.009699} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.872548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b81dea3-bbfb-4679-9cdc-f12339edb5ce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.875541] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c38d9107-cd9b-45e9-8f6c-54567c5568bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.878892] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 956.878892] env[62619]: value = "task-1365209" [ 956.878892] env[62619]: _type = "Task" [ 956.878892] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.909210] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180243MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 956.909344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 956.909531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 956.911043] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 956.911043] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522c9d8a-3971-4789-a2f2-515928a35b30" [ 956.911043] env[62619]: _type = "Task" [ 956.911043] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.911591] env[62619]: DEBUG oslo_concurrency.lockutils [None req-0a197edf-02ae-41f4-a5bc-d5e5ff28f142 tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "8c07697f-0e20-4ec5-88ec-ec4420906313" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 13.019s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 956.920329] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 956.925419] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]522c9d8a-3971-4789-a2f2-515928a35b30, 'name': SearchDatastore_Task, 'duration_secs': 0.010406} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 956.926168] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 956.926424] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e/1cbb03c7-5b97-4c3b-aee0-5fa948027a4e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 956.926691] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b44c3e0c-65dd-4281-8016-758a1bb398e1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.933110] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 956.933110] env[62619]: value = "task-1365210" [ 956.933110] env[62619]: _type = "Task" [ 956.933110] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 956.940783] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365210, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.023865] env[62619]: DEBUG oslo_vmware.api [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365205, 'name': PowerOnVM_Task, 'duration_secs': 0.659272} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.024077] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 957.024281] env[62619]: INFO nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Took 10.07 seconds to spawn the instance on the hypervisor. 
[ 957.024465] env[62619]: DEBUG nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 957.025233] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9da2b7ed-b2d8-4888-a6a3-9d6695cfa129 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.122431] env[62619]: DEBUG nova.compute.manager [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Received event network-changed-691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 957.122750] env[62619]: DEBUG nova.compute.manager [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Refreshing instance network info cache due to event network-changed-691521f9-74b1-4539-ac75-fd476fb64893. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 957.123109] env[62619]: DEBUG oslo_concurrency.lockutils [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] Acquiring lock "refresh_cache-f3638527-351b-4ea8-bf7d-6e90bc68abe4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 957.128495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-f3638527-351b-4ea8-bf7d-6e90bc68abe4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 957.128832] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Instance network_info: |[{"id": "691521f9-74b1-4539-ac75-fd476fb64893", "address": "fa:16:3e:0a:31:4a", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691521f9-74", "ovs_interfaceid": "691521f9-74b1-4539-ac75-fd476fb64893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 957.129141] env[62619]: DEBUG oslo_concurrency.lockutils [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] Acquired lock "refresh_cache-f3638527-351b-4ea8-bf7d-6e90bc68abe4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 957.129335] env[62619]: DEBUG nova.network.neutron [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Refreshing network info cache for port 691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 957.130638] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:31:4a', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '691521f9-74b1-4539-ac75-fd476fb64893', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.138248] env[62619]: DEBUG oslo.service.loopingcall [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.139217] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 957.139425] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-664c3551-85ea-45cc-b6f5-d95fdc7cc2fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.160665] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.160665] env[62619]: value = "task-1365211" [ 957.160665] env[62619]: _type = "Task" [ 957.160665] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.169014] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365211, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.256713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.257046] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.257282] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 957.257547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 957.257791] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 957.260087] env[62619]: INFO nova.compute.manager [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Terminating instance [ 957.262038] env[62619]: DEBUG nova.compute.manager [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 957.262319] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 957.263092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c117fe19-2d16-4b46-a3d3-ab1ae3c8a740 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.271440] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 957.271731] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-17fe07cc-60ac-43ae-a25d-95db3be5fcad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.279286] env[62619]: DEBUG oslo_vmware.api [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 957.279286] env[62619]: value = "task-1365212" [ 957.279286] env[62619]: _type = "Task" [ 957.279286] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.288352] env[62619]: DEBUG oslo_vmware.api [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365212, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.298864] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365208, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.390447] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365209, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.444640] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365210, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.542654] env[62619]: INFO nova.compute.manager [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Took 19.89 seconds to build instance. 
[ 957.671856] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365211, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.788622] env[62619]: DEBUG oslo_vmware.api [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365212, 'name': PowerOffVM_Task, 'duration_secs': 0.336601} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.788902] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 957.789089] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 957.789348] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-94c1b50f-9508-48f4-b5b7-1f8da62d31d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.799862] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365208, 'name': Destroy_Task, 'duration_secs': 0.79829} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.800067] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Destroyed the VM [ 957.800383] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 957.800679] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-dc17d3bc-370b-4663-b3a2-32c6dea223ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.808359] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 957.808359] env[62619]: value = "task-1365214" [ 957.808359] env[62619]: _type = "Task" [ 957.808359] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.816197] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365214, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.854894] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 957.855258] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 957.855386] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleting the datastore file [datastore1] 646b4ae6-09e1-4b3c-b17d-392e746df454 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 957.855570] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f63dfc99-cdc1-4c02-b844-9c11e38b056c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.861365] env[62619]: DEBUG oslo_vmware.api [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for the task: (returnval){ [ 957.861365] env[62619]: value = "task-1365215" [ 957.861365] env[62619]: _type = "Task" [ 957.861365] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.870984] env[62619]: DEBUG oslo_vmware.api [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365215, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.890939] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365209, 'name': ReconfigVM_Task, 'duration_secs': 0.729615} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.891245] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0/04e1e5ff-6385-4c3d-a226-355a171f7de0.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 957.891871] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0179a194-75b0-44fd-ade1-ed1a9a1ef1cd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.897387] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 957.897387] env[62619]: value = "task-1365216" [ 957.897387] env[62619]: _type = "Task" [ 957.897387] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.905509] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365216, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.946059] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365210, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.547764} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 957.947684] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 646b4ae6-09e1-4b3c-b17d-392e746df454 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.947822] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.947943] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 9f7d7830-b878-41b9-a236-f7cd5580cf1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948077] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 02dec4f2-cbe7-4bb0-a57e-3970c5669354 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948197] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948310] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 74aa214a-7eda-4613-a394-bc7477d3078e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948424] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 4bc613db-af56-48b4-8c24-7f44428d8b4f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948535] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 04e1e5ff-6385-4c3d-a226-355a171f7de0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948641] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948748] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance f3638527-351b-4ea8-bf7d-6e90bc68abe4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 957.948939] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 957.949087] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2432MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 957.951361] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e/1cbb03c7-5b97-4c3b-aee0-5fa948027a4e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 957.951595] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 957.952501] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-782310f5-b738-4b87-9ec2-80414f4a5b2f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.965532] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 957.965532] env[62619]: value = "task-1365217" [ 957.965532] env[62619]: _type = "Task" [ 957.965532] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.976146] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365217, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.988807] env[62619]: DEBUG nova.network.neutron [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Updated VIF entry in instance network info cache for port 691521f9-74b1-4539-ac75-fd476fb64893. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 957.989490] env[62619]: DEBUG nova.network.neutron [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Updating instance_info_cache with network_info: [{"id": "691521f9-74b1-4539-ac75-fd476fb64893", "address": "fa:16:3e:0a:31:4a", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap691521f9-74", "ovs_interfaceid": "691521f9-74b1-4539-ac75-fd476fb64893", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.045368] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bfb052c-eeb5-4b01-b71f-2b5639e3bc30 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 21.401s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 958.097160] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5124ce1d-8ff6-4bb2-a221-53c0a558cf26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.105388] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-def4012f-6539-4143-add0-1416fde6b3a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.139418] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b08484be-61a0-4ad8-96a3-d866db7928d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.147320] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bae2926c-bc54-4644-9324-73ec852635e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.160720] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 958.169662] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365211, 'name': 
CreateVM_Task, 'duration_secs': 0.598087} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.170388] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 958.171111] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.171294] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.171613] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 958.172100] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-659b29bc-9953-464c-8b3a-d5e761d1afca {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.176702] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 958.176702] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525e2311-d6cf-8ef0-ff7f-769d1852d8ac" [ 958.176702] env[62619]: _type = "Task" [ 958.176702] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.185686] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525e2311-d6cf-8ef0-ff7f-769d1852d8ac, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.319307] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365214, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.373626] env[62619]: DEBUG oslo_vmware.api [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Task: {'id': task-1365215, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118373} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.374078] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 958.374383] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 958.374688] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 958.374980] env[62619]: INFO nova.compute.manager [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Took 1.11 seconds to destroy the instance on the hypervisor. [ 958.375368] env[62619]: DEBUG oslo.service.loopingcall [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 958.375662] env[62619]: DEBUG nova.compute.manager [-] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 958.375809] env[62619]: DEBUG nova.network.neutron [-] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 958.407194] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365216, 'name': Rename_Task, 'duration_secs': 0.413987} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.407475] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 958.407700] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-88f3e9e8-fb8a-4954-b903-3db06752c9c6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.413625] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 958.413625] env[62619]: value = "task-1365218" [ 958.413625] env[62619]: _type = "Task" [ 958.413625] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.423182] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365218, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.437906] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4db162a4-d804-4313-b75d-912ef4773540 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.443957] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 958.444216] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-e6250516-2d0a-484b-a441-cfc75fe38484 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.449083] env[62619]: DEBUG oslo_vmware.api [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 958.449083] env[62619]: value = "task-1365219" [ 958.449083] env[62619]: _type = "Task" [ 958.449083] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.456200] env[62619]: DEBUG oslo_vmware.api [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365219, 'name': SuspendVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.475571] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365217, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.289488} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.475969] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 958.477102] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ad4f9a4-b750-4c3f-bfa7-cc8423282686 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.498187] env[62619]: INFO nova.compute.manager [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Unrescuing [ 958.498584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.498857] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.499150] env[62619]: DEBUG nova.network.neutron [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 958.501646] env[62619]: DEBUG oslo_concurrency.lockutils [req-4663f79c-66bd-4ff1-8c96-059c7d38711c req-d143bf8f-98cc-4a2e-a012-2356247296f1 service nova] Releasing lock "refresh_cache-f3638527-351b-4ea8-bf7d-6e90bc68abe4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.516014] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Reconfiguring VM instance instance-0000005c to attach disk [datastore1] 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e/1cbb03c7-5b97-4c3b-aee0-5fa948027a4e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 958.517445] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-301ab8a3-08b9-48be-9958-df4434c2d3fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.538463] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 958.538463] env[62619]: value = "task-1365220" [ 958.538463] env[62619]: _type = "Task" [ 
958.538463] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.548082] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365220, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.666021] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 958.688107] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525e2311-d6cf-8ef0-ff7f-769d1852d8ac, 'name': SearchDatastore_Task, 'duration_secs': 0.009911} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.689068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 958.689323] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.689569] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 958.689719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 958.689907] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 958.690469] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-98149de2-f636-46d0-af10-8c0525e73acd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.699240] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 958.699532] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 958.700622] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-801ddc60-6add-4ce3-ab55-bbc6ac703e66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.707719] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 958.707719] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cb85f3-58a2-de53-a39a-8028c4610177" [ 958.707719] env[62619]: _type = "Task" [ 958.707719] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.716715] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cb85f3-58a2-de53-a39a-8028c4610177, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.821105] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365214, 'name': RemoveSnapshot_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.924753] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365218, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.958453] env[62619]: DEBUG oslo_vmware.api [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365219, 'name': SuspendVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.049117] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365220, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.125128] env[62619]: DEBUG nova.network.neutron [-] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.171291] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 959.171517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.262s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 959.191477] env[62619]: DEBUG nova.compute.manager [req-ae7190b9-aef8-43bf-8687-64dd3cf75038 req-4fedca67-318b-45d5-9ab7-8022ae2402a2 service nova] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Received event network-vif-deleted-7b6beb91-6e5d-49a9-8465-ec68d37a4bbf {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 959.217793] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52cb85f3-58a2-de53-a39a-8028c4610177, 'name': SearchDatastore_Task, 'duration_secs': 0.017696} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.218625] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-073fb2a1-7a12-4e40-b385-625e9367374e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.224680] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 959.224680] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d2a568-1cef-823c-2e1a-07d814f3aa65" [ 959.224680] env[62619]: _type = "Task" [ 959.224680] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.232663] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d2a568-1cef-823c-2e1a-07d814f3aa65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.319829] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365214, 'name': RemoveSnapshot_Task, 'duration_secs': 1.180601} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.320137] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 959.320554] env[62619]: DEBUG nova.compute.manager [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.321342] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b29d643d-b522-4ac1-9386-98746604b1e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.424865] env[62619]: DEBUG oslo_vmware.api [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365218, 'name': PowerOnVM_Task, 'duration_secs': 0.994698} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.425501] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 959.425501] env[62619]: INFO nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Took 10.14 seconds to spawn the instance on the hypervisor. [ 959.425829] env[62619]: DEBUG nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.426536] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-638133ce-7bc9-41be-b0d5-d06999d0a307 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.461653] env[62619]: DEBUG oslo_vmware.api [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365219, 'name': SuspendVM_Task, 'duration_secs': 0.951223} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.461653] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 959.461653] env[62619]: DEBUG nova.compute.manager [None req-c2f64fca-9b81-4d7d-a29c-cc913205b106 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 959.462358] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bd8abc4-811e-4410-8117-69387f6e352d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.550481] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365220, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.560129] env[62619]: DEBUG nova.network.neutron [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [{"id": "53678dcd-6f40-4645-961d-a9c2608eeba8", "address": "fa:16:3e:bb:46:22", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53678dcd-6f", "ovs_interfaceid": "53678dcd-6f40-4645-961d-a9c2608eeba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 959.628037] env[62619]: INFO nova.compute.manager [-] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Took 1.25 seconds to deallocate network for instance. [ 959.735312] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d2a568-1cef-823c-2e1a-07d814f3aa65, 'name': SearchDatastore_Task, 'duration_secs': 0.009311} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 959.735557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 959.735788] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] f3638527-351b-4ea8-bf7d-6e90bc68abe4/f3638527-351b-4ea8-bf7d-6e90bc68abe4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 959.736103] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7a0e572c-9235-4a0e-9e12-dd4f8b3c27c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.742099] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 959.742099] env[62619]: value = "task-1365221" [ 959.742099] env[62619]: _type = "Task" [ 959.742099] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.749308] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365221, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.833788] env[62619]: INFO nova.compute.manager [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Shelve offloading [ 959.835612] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 959.835895] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-521434a2-cc4d-43a6-91e1-5ce0577cf3f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 959.842244] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 959.842244] env[62619]: value = "task-1365222" [ 959.842244] env[62619]: _type = "Task" [ 959.842244] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 959.849859] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365222, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 959.944614] env[62619]: INFO nova.compute.manager [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Took 16.87 seconds to build instance. [ 960.049759] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365220, 'name': ReconfigVM_Task, 'duration_secs': 1.481853} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.050066] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Reconfigured VM instance instance-0000005c to attach disk [datastore1] 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e/1cbb03c7-5b97-4c3b-aee0-5fa948027a4e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 960.050765] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-178860d2-7826-4b12-8b84-144664ce824a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.057834] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 960.057834] env[62619]: value = "task-1365223" [ 960.057834] env[62619]: _type = "Task" [ 960.057834] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.067033] env[62619]: DEBUG oslo_concurrency.lockutils [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 960.067682] env[62619]: DEBUG nova.objects.instance [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lazy-loading 'flavor' on Instance uuid 02dec4f2-cbe7-4bb0-a57e-3970c5669354 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.069046] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365223, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.135162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 960.135497] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 960.135783] env[62619]: DEBUG nova.objects.instance [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lazy-loading 'resources' on Instance uuid 646b4ae6-09e1-4b3c-b17d-392e746df454 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 960.254571] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365221, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.352018] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 960.352262] env[62619]: DEBUG nova.compute.manager [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 960.353086] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-163eece5-5163-45b7-86d7-8d93b1fefeda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.358589] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 960.359298] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 960.359298] env[62619]: DEBUG nova.network.neutron [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 
tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 960.446832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a8a9f817-0ea7-4f0c-b15d-30e3b97db6f2 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 18.382s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 960.568986] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365223, 'name': Rename_Task, 'duration_secs': 0.243769} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.569293] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 960.571422] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-066289d4-3d86-4e77-9ebc-2fcf1adb80f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.576800] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-203849af-5560-4c8f-ba87-646e2f379446 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.580462] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 960.580462] env[62619]: value = "task-1365224" [ 960.580462] env[62619]: _type = "Task" [ 960.580462] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.599533] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 960.600330] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f4c7de7a-1c20-47d4-8263-32b6148facbc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.604902] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365224, 'name': PowerOnVM_Task} progress is 33%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.608826] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 960.608826] env[62619]: value = "task-1365225" [ 960.608826] env[62619]: _type = "Task" [ 960.608826] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.616014] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365225, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.757618] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365221, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.540315} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 960.757895] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] f3638527-351b-4ea8-bf7d-6e90bc68abe4/f3638527-351b-4ea8-bf7d-6e90bc68abe4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 960.759061] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 960.759375] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a0fde31c-cddf-4537-ae57-5792c2100458 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.766764] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 960.766764] env[62619]: value = "task-1365226" [ 960.766764] env[62619]: _type = "Task" [ 960.766764] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 960.779429] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365226, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 960.803071] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75388c47-bc4d-441e-8c70-bcb88e3177e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.810382] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5608f064-0b73-4d5a-8afd-4246e63b28f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.844835] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63f8b0df-7f6b-4dec-906b-da9c7673e1b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.852671] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e46262d0-1b75-4560-b4c4-c34d05dd55d0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 960.868625] env[62619]: DEBUG nova.compute.provider_tree [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 961.078476] env[62619]: DEBUG nova.network.neutron [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 961.089950] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365224, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.117231] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365225, 'name': PowerOffVM_Task, 'duration_secs': 0.310954} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.117499] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 961.122617] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Reconfiguring VM instance instance-00000057 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 961.122882] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-026b66fc-c8eb-4abe-804a-17f1d784f2bc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.140532] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 961.140532] env[62619]: value = "task-1365227" [ 961.140532] env[62619]: _type = "Task" [ 961.140532] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.150348] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365227, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.276091] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365226, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.109491} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.276399] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 961.277208] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ee11d89-5729-4e32-9993-574c74439aa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.293082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "4bc613db-af56-48b4-8c24-7f44428d8b4f" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.293216] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.293541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "4bc613db-af56-48b4-8c24-7f44428d8b4f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.293713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.293821] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.303624] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Reconfiguring VM instance instance-0000005d to attach disk [datastore1] f3638527-351b-4ea8-bf7d-6e90bc68abe4/f3638527-351b-4ea8-bf7d-6e90bc68abe4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 961.304222] 
env[62619]: INFO nova.compute.manager [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Terminating instance [ 961.305722] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4f95ded1-b76c-4abf-a93f-0c54689fdeb0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.320689] env[62619]: DEBUG nova.compute.manager [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 961.320949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 961.322231] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be8126dd-3cf5-4ed6-a3ed-11787bfd2aa3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.329608] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 961.330738] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e2790c76-fc40-4780-9d3e-4700d27e836b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.332197] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 961.332197] env[62619]: value = "task-1365228" [ 961.332197] env[62619]: _type = "Task" [ 961.332197] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.339756] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365228, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.372525] env[62619]: DEBUG nova.scheduler.client.report [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 961.419549] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 961.419775] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 961.419958] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleting the datastore file [datastore1] 4bc613db-af56-48b4-8c24-7f44428d8b4f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 961.420301] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-840875c9-7247-4fe7-9267-2ae9c7e9c412 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.426695] env[62619]: DEBUG oslo_vmware.api [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 961.426695] env[62619]: value = "task-1365230" [ 961.426695] env[62619]: _type = "Task" [ 961.426695] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.435727] env[62619]: DEBUG oslo_vmware.api [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365230, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.581344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 961.594932] env[62619]: DEBUG oslo_vmware.api [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365224, 'name': PowerOnVM_Task, 'duration_secs': 0.602253} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.595311] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 961.595596] env[62619]: INFO nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Took 9.08 seconds to spawn the instance on the hypervisor. [ 961.595854] env[62619]: DEBUG nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 961.596974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04e54faa-cf50-4c52-9ed9-480e7dc85a34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.654845] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365227, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.758449] env[62619]: DEBUG nova.compute.manager [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 961.843045] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365228, 'name': ReconfigVM_Task, 'duration_secs': 0.505035} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.843441] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Reconfigured VM instance instance-0000005d to attach disk [datastore1] f3638527-351b-4ea8-bf7d-6e90bc68abe4/f3638527-351b-4ea8-bf7d-6e90bc68abe4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 961.844181] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-6bce196f-bb29-4a98-95e4-363c15a9b14d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 961.850695] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 961.850695] env[62619]: value = "task-1365231" [ 961.850695] env[62619]: _type = "Task" [ 961.850695] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 961.859983] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365231, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 961.877999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.742s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.900206] env[62619]: INFO nova.scheduler.client.report [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Deleted allocations for instance 646b4ae6-09e1-4b3c-b17d-392e746df454 [ 961.937572] env[62619]: DEBUG oslo_vmware.api [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365230, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.184517} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 961.937985] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 961.938239] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 961.938460] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 961.938686] env[62619]: INFO nova.compute.manager [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Took 0.62 seconds to destroy the instance on the hypervisor. [ 961.939029] env[62619]: DEBUG oslo.service.loopingcall [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 961.939400] env[62619]: DEBUG nova.compute.manager [-] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 961.939488] env[62619]: DEBUG nova.network.neutron [-] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 961.985489] env[62619]: DEBUG nova.compute.manager [req-e1ae57c5-3eb9-44ea-8cb4-7c731a408146 req-35431f1a-b42c-48db-92dd-12d3d14fe9c6 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-vif-unplugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 961.986048] env[62619]: DEBUG oslo_concurrency.lockutils [req-e1ae57c5-3eb9-44ea-8cb4-7c731a408146 req-35431f1a-b42c-48db-92dd-12d3d14fe9c6 service nova] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 961.986283] env[62619]: DEBUG oslo_concurrency.lockutils [req-e1ae57c5-3eb9-44ea-8cb4-7c731a408146 req-35431f1a-b42c-48db-92dd-12d3d14fe9c6 service nova] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 961.986462] env[62619]: DEBUG oslo_concurrency.lockutils [req-e1ae57c5-3eb9-44ea-8cb4-7c731a408146 req-35431f1a-b42c-48db-92dd-12d3d14fe9c6 service nova] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 961.986638] env[62619]: DEBUG nova.compute.manager [req-e1ae57c5-3eb9-44ea-8cb4-7c731a408146 req-35431f1a-b42c-48db-92dd-12d3d14fe9c6 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] No waiting events found dispatching network-vif-unplugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 961.986810] env[62619]: WARNING nova.compute.manager [req-e1ae57c5-3eb9-44ea-8cb4-7c731a408146 req-35431f1a-b42c-48db-92dd-12d3d14fe9c6 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received unexpected event network-vif-unplugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e for instance with vm_state shelved and task_state shelving_offloading. 
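The task-1365230 records above follow the standard oslo.vmware pattern: FileManager.DeleteDatastoreFile_Task is invoked, a Task reference comes back, and wait_for_task polls it until the "completed successfully" line appears. A minimal sketch of that pattern against oslo.vmware's public session API (the vCenter address, credentials and datacenter reference below are placeholders, not values from this deployment):

    # Sketch of the invoke-then-poll pattern behind the task-1365230 records;
    # host, credentials and datacenter_ref are placeholders.
    from oslo_vmware import api

    session = api.VMwareAPISession(
        'vc.example.org', 'user', 'secret',   # host, username, password
        10,                                    # API retry count
        0.5)                                   # task poll interval in seconds

    datacenter_ref = None  # placeholder: Datacenter managed-object ref looked up elsewhere

    # Ask the FileManager to delete a datastore path; a Task moref is returned.
    task = session.invoke_api(
        session.vim, 'DeleteDatastoreFile_Task',
        session.vim.service_content.fileManager,
        name='[datastore1] 4bc613db-af56-48b4-8c24-7f44428d8b4f',
        datacenter=datacenter_ref)

    # Blocks, polling TaskInfo until success or failure (raises on error);
    # this polling produces the "progress is N%" debug lines seen above.
    session.wait_for_task(task)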
[ 962.031886] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 962.033204] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2eeb182d-74dd-47e7-802e-a4693488eec8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.042734] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 962.043073] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-4b6a7436-4d3c-4fac-ad56-f41e6919304d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.118199] env[62619]: INFO nova.compute.manager [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Took 17.92 seconds to build instance. [ 962.120584] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 962.120841] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 962.121097] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleting the datastore file [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 962.121578] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e3921fbe-3c16-4ca3-8ee2-56e553d3c572 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.127883] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 962.127883] env[62619]: value = "task-1365233" [ 962.127883] env[62619]: _type = "Task" [ 962.127883] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.135972] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365233, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.150567] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365227, 'name': ReconfigVM_Task, 'duration_secs': 0.54926} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.150909] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Reconfigured VM instance instance-00000057 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 962.151128] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.151384] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-bc74c6d5-fc7b-4429-ba9c-e57837355832 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.157205] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 962.157205] env[62619]: value = "task-1365234" [ 962.157205] env[62619]: _type = "Task" [ 962.157205] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.167974] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365234, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.222724] env[62619]: DEBUG nova.compute.manager [req-88c539f9-75ae-4979-9483-2f2bbed170cd req-1650c272-6fd1-470e-906e-a9c629b89e48 service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Received event network-vif-deleted-eaa83ebe-4c4d-40e5-9883-5f85c6f71217 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 962.222724] env[62619]: INFO nova.compute.manager [req-88c539f9-75ae-4979-9483-2f2bbed170cd req-1650c272-6fd1-470e-906e-a9c629b89e48 service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Neutron deleted interface eaa83ebe-4c4d-40e5-9883-5f85c6f71217; detaching it from the instance and deleting it from the info cache [ 962.223057] env[62619]: DEBUG nova.network.neutron [req-88c539f9-75ae-4979-9483-2f2bbed170cd req-1650c272-6fd1-470e-906e-a9c629b89e48 service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.277946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 962.278251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 962.361430] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365231, 'name': Rename_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.407838] env[62619]: DEBUG oslo_concurrency.lockutils [None req-953de759-3226-41d3-b2a7-ed04a22e8c6b tempest-AttachInterfacesTestJSON-1676831864 tempest-AttachInterfacesTestJSON-1676831864-project-member] Lock "646b4ae6-09e1-4b3c-b17d-392e746df454" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.151s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.622902] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e3461c8f-7a53-42e1-97fb-734f094a210a tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 19.437s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 962.637054] env[62619]: DEBUG oslo_vmware.api [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365233, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.223275} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.637315] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 962.637508] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 962.637678] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 962.667380] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365234, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 962.668516] env[62619]: INFO nova.scheduler.client.report [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted allocations for instance 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a [ 962.704213] env[62619]: DEBUG nova.network.neutron [-] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 962.725455] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-01666d41-a27f-46d7-b685-6c39a81b3ad4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.735825] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47f9b5bc-34b0-4b81-8852-72f64b4862db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.764708] env[62619]: DEBUG nova.compute.manager [req-88c539f9-75ae-4979-9483-2f2bbed170cd req-1650c272-6fd1-470e-906e-a9c629b89e48 service nova] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Detach interface failed, port_id=eaa83ebe-4c4d-40e5-9883-5f85c6f71217, reason: Instance 4bc613db-af56-48b4-8c24-7f44428d8b4f could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 962.783955] env[62619]: INFO nova.compute.claims [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 962.862483] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365231, 'name': Rename_Task, 'duration_secs': 0.917191} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 962.862693] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 962.862838] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0dc8dae7-1a70-4f76-9893-e42bf05b2f85 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 962.869604] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 962.869604] env[62619]: value = "task-1365235" [ 962.869604] env[62619]: _type = "Task" [ 962.869604] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 962.877263] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365235, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.169196] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365234, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.173016] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.209486] env[62619]: INFO nova.compute.manager [-] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Took 1.27 seconds to deallocate network for instance. [ 963.291969] env[62619]: INFO nova.compute.resource_tracker [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating resource usage from migration 9cd4bbc0-93f7-428b-889f-3b09903f6ab4 [ 963.385685] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365235, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.440658] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-475ea781-9637-4caa-bb11-1224062f8805 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.448394] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510ff733-7da7-40a1-82dc-db76743a69ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.478784] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0257a407-9f34-496f-b3ea-0a511913abc2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.487605] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee333cf9-c6d1-4b60-831a-e0c04a5efb83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 963.505861] env[62619]: DEBUG nova.compute.provider_tree [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 963.676646] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365234, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 963.715546] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 963.887027] env[62619]: DEBUG oslo_vmware.api [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365235, 'name': PowerOnVM_Task, 'duration_secs': 0.779666} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 963.887027] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 963.887027] env[62619]: INFO nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Took 9.09 seconds to spawn the instance on the hypervisor. 
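The repeated Lock "compute_resources" acquired/released ... waited/held N s lines in this span come from oslo.concurrency's semaphore wrapper around the resource tracker's critical sections. A rough illustration of how such a section is normally written (the function below is a stand-in, not Nova's actual resource tracker code):

    # Illustrative only: how the lockutils "acquired by"/"released by" debug
    # lines are typically produced; update_usage_example is a made-up stand-in.
    from oslo_concurrency import lockutils

    @lockutils.synchronized('compute_resources')
    def update_usage_example(tracker, instance):
        # Runs under the named semaphore; lockutils logs how long the caller
        # waited for the lock and how long it was held, matching the
        # "waited 0.000s" / "held 1.742s" figures in the log.
        tracker.recalculate(instance)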
[ 963.887027] env[62619]: DEBUG nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 963.887027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6a6a2b-781e-4871-b819-101171e6e08b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.008587] env[62619]: DEBUG nova.scheduler.client.report [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 964.016151] env[62619]: DEBUG nova.compute.manager [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 964.016151] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1dcc85a8-2b6f-404b-85da-58d7d59eb455 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.022786] env[62619]: DEBUG nova.compute.manager [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-changed-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 964.024593] env[62619]: DEBUG nova.compute.manager [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Refreshing instance network info cache due to event network-changed-8c7fa88a-7dbd-49cf-b490-e311fa9a804e. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 964.024593] env[62619]: DEBUG oslo_concurrency.lockutils [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 964.024593] env[62619]: DEBUG oslo_concurrency.lockutils [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 964.024593] env[62619]: DEBUG nova.network.neutron [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Refreshing network info cache for port 8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 964.171764] env[62619]: DEBUG oslo_vmware.api [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365234, 'name': PowerOnVM_Task, 'duration_secs': 1.8659} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 964.172983] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 964.172983] env[62619]: DEBUG nova.compute.manager [None req-c02750f4-6479-42d7-8b50-0f56adadb177 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 964.173779] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9a3c712b-605e-41db-aa18-175082dbe1de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.410735] env[62619]: INFO nova.compute.manager [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Took 19.17 seconds to build instance. 
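The "Inventory has not changed for provider ..." records carry the node's full placement inventory. For each resource class the schedulable capacity is (total - reserved) * allocation_ratio, and max_unit caps what any single allocation may request. A quick check against the figures reported here (capacity() is a hypothetical helper, not a placement API):

    # Arithmetic on the inventory dict reported for provider
    # c1b543f3-8b72-4e01-a5a8-30dc9ed76c83; capacity() is a made-up helper.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 148},
    }

    def capacity(inv):
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        # VCPU: (48 - 0) * 4.0 = 192.0; MEMORY_MB: 196078.0; DISK_GB: 400.0.
        # No single allocation may request more than max_unit of a class.
        print(rc, capacity(inv), 'max per allocation:', inv['max_unit'])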
[ 964.505041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 964.514986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.237s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 964.515239] env[62619]: INFO nova.compute.manager [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Migrating [ 964.522213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.349s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 964.522494] env[62619]: DEBUG nova.objects.instance [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'resources' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 964.535198] env[62619]: INFO nova.compute.manager [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] instance snapshotting [ 964.542616] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dcf55920-75e9-42e2-b797-77d41b255a23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.565496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c996b94c-b100-4d26-806f-7cfc6ec5f905 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 964.754246] env[62619]: DEBUG nova.network.neutron [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updated VIF entry in instance network info cache for port 8c7fa88a-7dbd-49cf-b490-e311fa9a804e. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 964.754635] env[62619]: DEBUG nova.network.neutron [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": null, "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "unbound", "details": {}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": null, "qbh_params": null, "qbg_params": null, "active": false, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 964.914391] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb2cc9e3-ce46-4180-8f89-48c2b8289505 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 20.678s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.035216] env[62619]: DEBUG nova.objects.instance [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'numa_topology' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 965.036276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 965.036439] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 965.036610] env[62619]: DEBUG nova.network.neutron [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 965.078843] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 
1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Creating Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:913}} [ 965.079236] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CreateSnapshot_Task with opID=oslo.vmware-499b3787-73d0-42b3-bacd-3c4be0e6d6fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.087537] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 965.087537] env[62619]: value = "task-1365236" [ 965.087537] env[62619]: _type = "Task" [ 965.087537] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.095235] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365236, 'name': CreateSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.257451] env[62619]: DEBUG oslo_concurrency.lockutils [req-2563505f-aef6-4a66-a4de-4c03b1ea414e req-a143340f-bfb3-40b7-b7b3-c0632b4273c4 service nova] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 965.421220] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.421550] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.421784] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 965.421979] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 965.422164] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock 
"f3638527-351b-4ea8-bf7d-6e90bc68abe4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 965.424922] env[62619]: INFO nova.compute.manager [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Terminating instance [ 965.427556] env[62619]: DEBUG nova.compute.manager [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 965.427556] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 965.428581] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8596eb4-3ec7-4a80-8970-45d7d5da8772 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.437524] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 965.437775] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0c196f32-53cb-4c5e-b198-b306d6ab6d37 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.443438] env[62619]: DEBUG oslo_vmware.api [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 965.443438] env[62619]: value = "task-1365237" [ 965.443438] env[62619]: _type = "Task" [ 965.443438] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 965.451168] env[62619]: DEBUG oslo_vmware.api [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365237, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.539657] env[62619]: DEBUG nova.objects.base [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Object Instance<3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a> lazy-loaded attributes: resources,numa_topology {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 965.603266] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365236, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 965.713243] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-751ff886-7772-4317-97c4-642f5020cb63 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.720808] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12122e4e-3e3d-4648-973a-2c9e2d0b3598 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.754117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738f34a3-eea8-4498-ae9a-bac91493cf8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.761921] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b5efba3-c059-4d10-a1bc-46341c4925ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 965.775402] env[62619]: DEBUG nova.compute.provider_tree [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 965.929344] env[62619]: DEBUG nova.network.neutron [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance_info_cache with network_info: [{"id": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "address": "fa:16:3e:51:5c:ce", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f7c57e-c3", "ovs_interfaceid": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 965.953992] env[62619]: DEBUG oslo_vmware.api [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365237, 'name': PowerOffVM_Task, 'duration_secs': 0.181636} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 965.954293] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 965.954803] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 965.954934] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-31abe8ff-b63b-42d4-b378-604992073b89 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.012233] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 966.012511] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 966.012773] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore1] f3638527-351b-4ea8-bf7d-6e90bc68abe4 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 966.013129] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fe3079a3-d40a-4ac7-899c-0db15f728695 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.019511] env[62619]: DEBUG oslo_vmware.api [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 966.019511] env[62619]: value = "task-1365239" [ 966.019511] env[62619]: _type = "Task" [ 966.019511] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 966.027616] env[62619]: DEBUG oslo_vmware.api [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365239, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.100146] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365236, 'name': CreateSnapshot_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 966.279758] env[62619]: DEBUG nova.scheduler.client.report [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 966.432878] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 966.533776] env[62619]: DEBUG oslo_vmware.api [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365239, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137128} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.537029] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 966.537373] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 966.539265] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 966.539478] env[62619]: INFO nova.compute.manager [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Took 1.11 seconds to destroy the instance on the hypervisor. [ 966.539735] env[62619]: DEBUG oslo.service.loopingcall [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 966.539952] env[62619]: DEBUG nova.compute.manager [-] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 966.540058] env[62619]: DEBUG nova.network.neutron [-] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 966.600961] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365236, 'name': CreateSnapshot_Task, 'duration_secs': 1.01589} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 966.601963] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Created Snapshot of the VM instance {{(pid=62619) _create_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:922}} [ 966.602068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f891e6bb-d8d2-4b44-bb63-d9c6889bbe27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 966.788045] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 2.266s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 966.791295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 3.076s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 966.791604] env[62619]: DEBUG nova.objects.instance [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'resources' on Instance uuid 4bc613db-af56-48b4-8c24-7f44428d8b4f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 967.006066] env[62619]: DEBUG nova.compute.manager [req-4a2a0bd4-3c9d-4497-99f2-68be43d991e7 req-e8fd4a4b-5a7d-4fd9-a3af-2b1bba0641be service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Received event network-vif-deleted-691521f9-74b1-4539-ac75-fd476fb64893 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 967.006305] env[62619]: INFO nova.compute.manager [req-4a2a0bd4-3c9d-4497-99f2-68be43d991e7 req-e8fd4a4b-5a7d-4fd9-a3af-2b1bba0641be service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Neutron deleted interface 691521f9-74b1-4539-ac75-fd476fb64893; detaching it from the instance and deleting it from the info cache [ 967.006597] env[62619]: DEBUG nova.network.neutron [req-4a2a0bd4-3c9d-4497-99f2-68be43d991e7 
req-e8fd4a4b-5a7d-4fd9-a3af-2b1bba0641be service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.119453] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Creating linked-clone VM from snapshot {{(pid=62619) _create_linked_clone_from_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:955}} [ 967.119797] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.CloneVM_Task with opID=oslo.vmware-27133a4c-9b8d-4807-9e8c-b857029d94f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.128517] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 967.128517] env[62619]: value = "task-1365240" [ 967.128517] env[62619]: _type = "Task" [ 967.128517] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 967.137188] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365240, 'name': CloneVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.298517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9cef29b0-5c00-4feb-822e-ceb9fa74b76a tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" "released" by "nova.compute.manager.ComputeManager.shelve_instance..do_shelve_instance" :: held 24.930s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 967.302197] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" acquired by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: waited 2.798s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 967.302792] env[62619]: INFO nova.compute.manager [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Unshelving [ 967.431466] env[62619]: DEBUG nova.network.neutron [-] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 967.511528] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c064bd6-724b-4553-93b5-0cac2269c948 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.522524] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34d371b5-02dd-4d27-a503-6674628f84c7 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.556895] env[62619]: DEBUG nova.compute.manager [req-4a2a0bd4-3c9d-4497-99f2-68be43d991e7 req-e8fd4a4b-5a7d-4fd9-a3af-2b1bba0641be service nova] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Detach interface failed, port_id=691521f9-74b1-4539-ac75-fd476fb64893, reason: Instance f3638527-351b-4ea8-bf7d-6e90bc68abe4 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 967.558284] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c26fd34f-24f1-48e5-91fe-a2197e1f6438 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.566332] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a98a7767-b71f-4ddf-8a3b-88e287b0328c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.596575] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df28458-7270-4690-a45d-55b3074ce11e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.604703] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47144f1d-5403-49e2-9e5d-18e5e0068e14 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.619505] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 967.640303] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365240, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 967.933901] env[62619]: INFO nova.compute.manager [-] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Took 1.39 seconds to deallocate network for instance. 
[ 967.959019] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a9d9671-6e91-4952-b52d-91b09ebe7417 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 967.982211] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 968.140978] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365240, 'name': CloneVM_Task} progress is 94%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.145238] env[62619]: ERROR nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [req-0ce47b7b-539c-461f-81b0-b6b1f0068932] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-0ce47b7b-539c-461f-81b0-b6b1f0068932"}]} [ 968.163171] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 968.177049] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 968.177361] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 
'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.189266] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 968.208362] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 968.329533] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.353119] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646ff9c8-8140-4cb5-b22a-f36dd4af465f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.360975] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f12172c3-ad6a-4aa3-b6d8-945ac07def82 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.391181] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c78695b1-66c5-4048-bba9-15da60baedad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.399353] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67c82f84-ed2e-43d1-a30b-570dd10305b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.413735] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.441765] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.488715] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 968.489165] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4e5d50c0-d0cc-4d4b-9d0f-019f46375fd4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 968.496772] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 968.496772] env[62619]: value = "task-1365241" [ 968.496772] env[62619]: _type = "Task" [ 968.496772] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 968.510614] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365241, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.581554] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 968.581860] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 968.640316] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365240, 'name': CloneVM_Task} progress is 95%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 968.935036] env[62619]: ERROR nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [req-67e26bd8-1c0b-4559-baed-ef07221a8248] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-67e26bd8-1c0b-4559-baed-ef07221a8248"}]} [ 968.951633] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 968.966346] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 968.966597] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 968.977200] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 968.993671] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 
tempest-DeleteServersTestJSON-1341222198-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 969.006676] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365241, 'name': PowerOffVM_Task, 'duration_secs': 0.312621} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.006945] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 969.007147] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 969.083827] env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 969.142987] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365240, 'name': CloneVM_Task, 'duration_secs': 1.879913} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 969.143351] env[62619]: INFO nova.virt.vmwareapi.vmops [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Created linked-clone VM from snapshot [ 969.144477] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ee9a186-7056-4a4c-b50c-557621e9d796 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.154443] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Uploading image 26131b53-c780-4a5f-bcc1-55bc66be3873 {{(pid=62619) upload_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:441}} [ 969.166951] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-15b4ab42-af51-491e-8011-66ce3b583501 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.168573] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Destroying the VM {{(pid=62619) destroy_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1368}} [ 969.168810] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Destroy_Task with opID=oslo.vmware-590fa23c-a136-412c-b2d5-bfdda5822254 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.174087] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29ba34a0-f003-460c-9098-6a8bd60b51f6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.177889] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 969.177889] env[62619]: value = "task-1365242" [ 969.177889] env[62619]: _type = "Task" [ 969.177889] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.207682] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc8bbbd1-1e5f-4d88-b955-6099a0e9f5e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.212940] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365242, 'name': Destroy_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.217700] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e8c7ec-37e6-488e-b2e3-10efb49092d7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.230590] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 969.392924] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "69e916ee-9edc-4e1b-85a0-40142364e3bb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.393670] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 969.515532] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 969.515894] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 969.516065] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) 
get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 969.516253] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 969.516399] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 969.516539] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 969.516775] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 969.516982] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 969.517171] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 969.517333] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 969.517502] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 969.523028] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f153d62d-585f-4344-90f4-b756f94311c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 969.538671] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 969.538671] env[62619]: value = "task-1365243" [ 969.538671] env[62619]: _type = "Task" [ 969.538671] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 969.548319] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365243, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.605543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 969.688319] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365242, 'name': Destroy_Task} progress is 33%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 969.760837] env[62619]: DEBUG nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 117 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 969.761132] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 117 to 118 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 969.761322] env[62619]: DEBUG nova.compute.provider_tree [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 969.896053] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 970.049269] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365243, 'name': ReconfigVM_Task, 'duration_secs': 0.190421} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.049603] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 970.188776] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365242, 'name': Destroy_Task, 'duration_secs': 0.583792} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 970.188979] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Destroyed the VM [ 970.189224] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Deleting Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:932}} [ 970.189482] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachineSnapshot.RemoveSnapshot_Task with opID=oslo.vmware-ef8a16c3-010d-4b3a-9a4d-13245290f9b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.195380] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 970.195380] env[62619]: value = "task-1365244" [ 970.195380] env[62619]: _type = "Task" [ 970.195380] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.203177] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365244, 'name': RemoveSnapshot_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.266732] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 3.475s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 970.269092] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 1.940s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 970.269355] env[62619]: DEBUG nova.objects.instance [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'pci_requests' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.284916] env[62619]: INFO nova.scheduler.client.report [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocations for instance 4bc613db-af56-48b4-8c24-7f44428d8b4f [ 970.414862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 970.556276] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 970.556551] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 970.556714] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 
970.556917] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 970.557093] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 970.557247] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 970.557464] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 970.557623] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 970.557788] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 970.557949] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 970.558144] env[62619]: DEBUG nova.virt.hardware [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 970.563375] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Reconfiguring VM instance instance-0000005b to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 970.563722] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-989fc9ef-ba26-4572-b7bc-00d34f948956 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 970.582740] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 
tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 970.582740] env[62619]: value = "task-1365245" [ 970.582740] env[62619]: _type = "Task" [ 970.582740] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 970.591820] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365245, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.705528] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365244, 'name': RemoveSnapshot_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 970.776812] env[62619]: DEBUG nova.objects.instance [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'numa_topology' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 970.794147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d568df27-13bc-49e8-8d22-0c38140f7de2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "4bc613db-af56-48b4-8c24-7f44428d8b4f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.501s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 971.093887] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365245, 'name': ReconfigVM_Task, 'duration_secs': 0.167438} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.094237] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Reconfigured VM instance instance-0000005b to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 971.095119] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00e846ca-a59d-492b-8ac5-223a478d6488 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.118578] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Reconfiguring VM instance instance-0000005b to attach disk [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0/04e1e5ff-6385-4c3d-a226-355a171f7de0.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 971.118874] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-fd2b2261-6d8d-4f1a-bf0f-7e814af2005a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 971.137772] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 971.137772] env[62619]: value = "task-1365246" [ 971.137772] env[62619]: _type = "Task" [ 971.137772] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 971.149123] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365246, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 971.205283] env[62619]: DEBUG oslo_vmware.api [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365244, 'name': RemoveSnapshot_Task, 'duration_secs': 0.86685} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.205820] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Deleted Snapshot of the VM instance {{(pid=62619) _delete_vm_snapshot /opt/stack/nova/nova/virt/vmwareapi/vmops.py:938}} [ 971.281638] env[62619]: INFO nova.compute.claims [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 971.647412] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365246, 'name': ReconfigVM_Task, 'duration_secs': 0.278048} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 971.647696] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Reconfigured VM instance instance-0000005b to attach disk [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0/04e1e5ff-6385-4c3d-a226-355a171f7de0.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 971.647985] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 971.672683] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "1c406554-d91d-422a-9a5a-9e910fc51103" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 971.672925] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 971.711282] env[62619]: WARNING nova.compute.manager [None req-be9d96b6-ea0c-49e9-b563-0b64c6a7f755 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Image not found during snapshot: nova.exception.ImageNotFound: Image 26131b53-c780-4a5f-bcc1-55bc66be3873 could not be found. 
[ 972.154916] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c97ad5a0-d396-49c9-ac13-22c1b9d1b7b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.173497] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-22a34260-ad1e-4ed5-a741-aec078bb4aa5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.176195] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 972.201389] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 972.329062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.329062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.329062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.329273] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 972.329380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 972.331443] 
env[62619]: INFO nova.compute.manager [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Terminating instance [ 972.335615] env[62619]: DEBUG nova.compute.manager [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 972.335681] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 972.336502] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b951d04-28d5-4e19-8f98-897944b18b2b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.343801] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 972.344340] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-74b1d55a-6c30-4d23-8315-0c66122e149f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.352588] env[62619]: DEBUG oslo_vmware.api [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 972.352588] env[62619]: value = "task-1365247" [ 972.352588] env[62619]: _type = "Task" [ 972.352588] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.362140] env[62619]: DEBUG oslo_vmware.api [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365247, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 972.447253] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ad0ced1-b074-47f3-827a-0bd6f4328d56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.455336] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fd1325e-0dc6-43ab-a243-4e41892c0c3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.487556] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d0f6b9-44d0-42cd-8c07-530402f13830 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.495231] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9672c52-ae00-4a47-8a10-787fb0b7d343 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.509347] env[62619]: DEBUG nova.compute.provider_tree [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 972.696202] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 972.745015] env[62619]: DEBUG nova.network.neutron [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Port c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 972.863297] env[62619]: DEBUG oslo_vmware.api [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365247, 'name': PowerOffVM_Task, 'duration_secs': 0.281291} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 972.863599] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 972.863734] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 972.863991] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-e976eaf8-8f5f-47b2-ab7d-619e93eea242 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.921573] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 972.921811] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 972.921988] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleting the datastore file [datastore1] 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 972.922363] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37cbde08-9d09-4d93-bc2d-4002f1e9f664 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 972.929000] env[62619]: DEBUG oslo_vmware.api [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for the task: (returnval){ [ 972.929000] env[62619]: value = "task-1365249" [ 972.929000] env[62619]: _type = "Task" [ 972.929000] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 972.938048] env[62619]: DEBUG oslo_vmware.api [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365249, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 973.012753] env[62619]: DEBUG nova.scheduler.client.report [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 973.438809] env[62619]: DEBUG oslo_vmware.api [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Task: {'id': task-1365249, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.177556} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 973.439196] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 973.439280] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 973.439382] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 973.439555] env[62619]: INFO nova.compute.manager [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Took 1.10 seconds to destroy the instance on the hypervisor. [ 973.439793] env[62619]: DEBUG oslo.service.loopingcall [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 973.439986] env[62619]: DEBUG nova.compute.manager [-] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 973.440093] env[62619]: DEBUG nova.network.neutron [-] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 973.517926] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.249s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 973.520198] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 5.079s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.520455] env[62619]: DEBUG nova.objects.instance [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid f3638527-351b-4ea8-bf7d-6e90bc68abe4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 973.565371] env[62619]: INFO nova.network.neutron [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating port 8c7fa88a-7dbd-49cf-b490-e311fa9a804e with attributes {'binding:host_id': 'cpu-1', 'device_owner': 'compute:nova'} [ 973.704658] env[62619]: DEBUG nova.compute.manager [req-02cd5a17-1a0e-4cfe-9ca2-1f4e57a111ed req-93b89cb1-fcba-4401-b66d-4311fe4124c5 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Received event network-vif-deleted-e14ea18e-2f1a-4153-9f37-ab23790db41f {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 973.704892] env[62619]: INFO nova.compute.manager [req-02cd5a17-1a0e-4cfe-9ca2-1f4e57a111ed req-93b89cb1-fcba-4401-b66d-4311fe4124c5 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Neutron deleted interface e14ea18e-2f1a-4153-9f37-ab23790db41f; detaching it from the instance and deleting it from the info cache [ 973.705321] env[62619]: DEBUG nova.network.neutron [req-02cd5a17-1a0e-4cfe-9ca2-1f4e57a111ed req-93b89cb1-fcba-4401-b66d-4311fe4124c5 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 973.769748] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 973.769996] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 973.770227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.174078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f90a5f6-6b1c-4342-9b71-0a4800dd38fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.181415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f0c31b2-d3a9-4a5f-8c2d-86f928306ca6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.184570] env[62619]: DEBUG nova.network.neutron [-] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 974.211539] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-39dbd120-e767-4ede-9e94-1a6732bfaf19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.213717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd54cc8c-3d75-4c68-afef-7c849622bcac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.222890] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6872808f-b5a8-4919-8179-1e049bc039d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.230121] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e66d9adc-ed14-4802-8078-0f3ec1e454d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 974.251168] env[62619]: DEBUG nova.compute.provider_tree [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 974.259569] env[62619]: DEBUG nova.compute.manager [req-02cd5a17-1a0e-4cfe-9ca2-1f4e57a111ed req-93b89cb1-fcba-4401-b66d-4311fe4124c5 service nova] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Detach interface failed, port_id=e14ea18e-2f1a-4153-9f37-ab23790db41f, reason: 
Instance 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 974.260619] env[62619]: DEBUG nova.scheduler.client.report [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 974.687342] env[62619]: INFO nova.compute.manager [-] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Took 1.25 seconds to deallocate network for instance. [ 974.765511] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.245s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 974.767816] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.162s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 974.769387] env[62619]: INFO nova.compute.claims [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 974.790492] env[62619]: INFO nova.scheduler.client.report [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance f3638527-351b-4ea8-bf7d-6e90bc68abe4 [ 974.806992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 974.806992] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 974.807180] env[62619]: DEBUG nova.network.neutron [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Building network info cache for 
instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 975.194066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.286124] env[62619]: DEBUG nova.compute.manager [req-39220019-a86c-4e8a-8222-4c7b22b77137 req-3ec26676-d0ba-4407-8a7e-dae84aff9645 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-vif-plugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 975.286304] env[62619]: DEBUG oslo_concurrency.lockutils [req-39220019-a86c-4e8a-8222-4c7b22b77137 req-3ec26676-d0ba-4407-8a7e-dae84aff9645 service nova] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 975.286518] env[62619]: DEBUG oslo_concurrency.lockutils [req-39220019-a86c-4e8a-8222-4c7b22b77137 req-3ec26676-d0ba-4407-8a7e-dae84aff9645 service nova] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 975.286689] env[62619]: DEBUG oslo_concurrency.lockutils [req-39220019-a86c-4e8a-8222-4c7b22b77137 req-3ec26676-d0ba-4407-8a7e-dae84aff9645 service nova] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.286861] env[62619]: DEBUG nova.compute.manager [req-39220019-a86c-4e8a-8222-4c7b22b77137 req-3ec26676-d0ba-4407-8a7e-dae84aff9645 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] No waiting events found dispatching network-vif-plugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 975.287112] env[62619]: WARNING nova.compute.manager [req-39220019-a86c-4e8a-8222-4c7b22b77137 req-3ec26676-d0ba-4407-8a7e-dae84aff9645 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received unexpected event network-vif-plugged-8c7fa88a-7dbd-49cf-b490-e311fa9a804e for instance with vm_state shelved_offloaded and task_state spawning. 
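The PowerOffVM_Task and DeleteDatastoreFile_Task sequences above, with their "Waiting for the task: (returnval){ value = ... }" and "completed successfully" lines, are oslo.vmware polling vCenter tasks to completion. A minimal sketch of that polling pattern, assuming an already configured VMwareAPISession; the host, credentials, and helper name are hypothetical and not taken from this deployment:

    from oslo_vmware import api

    def power_off_and_wait(session, vm_ref):
        # Invoke the vSphere PowerOffVM_Task method and block until the task
        # finishes; wait_for_task() is what produces the "Waiting for the task"
        # and "completed successfully" DEBUG lines seen in the log.
        task = session.invoke_api(session.vim, 'PowerOffVM_Task', vm_ref)
        return session.wait_for_task(task)

    # Example usage (values are placeholders):
    # session = api.VMwareAPISession('vc.example.org', 'user', 'secret',
    #                                api_retry_count=10, task_poll_interval=0.5)
    # power_off_and_wait(session, vm_ref)  # vm_ref obtained from a prior lookup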
[ 975.297446] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f87191b0-f7b9-46eb-9baa-9607282170f2 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f3638527-351b-4ea8-bf7d-6e90bc68abe4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.876s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 975.405946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 975.405946] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 975.406171] env[62619]: DEBUG nova.network.neutron [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 975.521915] env[62619]: DEBUG nova.network.neutron [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance_info_cache with network_info: [{"id": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "address": "fa:16:3e:51:5c:ce", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f7c57e-c3", "ovs_interfaceid": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 975.904765] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-847fce4a-eab7-4b60-a7ba-dc896ca98a2a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.913910] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-40ed6b8c-93ad-49a0-9ba8-6ea6de6473e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.949350] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e911c9e7-b3e7-41fd-851c-adf37dfc88b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.957381] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8537adb-5234-4d7f-929b-25df8d9483fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 975.971111] env[62619]: DEBUG nova.compute.provider_tree [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 976.024099] env[62619]: DEBUG oslo_concurrency.lockutils [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.121221] env[62619]: DEBUG nova.network.neutron [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 976.474781] env[62619]: DEBUG nova.scheduler.client.report [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 
'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 976.546942] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7c0b645-d6e2-4460-953b-671ba045d751 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.566931] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-071a8dcb-e18b-4f96-9946-8066fed1b20c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.572978] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 976.623640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 976.648387] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='2dc65b8804c9ed15c4b440b59435f438',container_format='bare',created_at=2024-10-25T17:06:11Z,direct_url=,disk_format='vmdk',id=9845866c-f4a3-4a1c-a044-56cf34137267,min_disk=1,min_ram=0,name='tempest-ServersNegativeTestJSON-server-1800400354-shelved',owner='578df9b6434d416fbae5f3cf2c33ef1b',properties=ImageMetaProps,protected=,size=31666688,status='active',tags=,updated_at=2024-10-25T17:06:26Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 976.648643] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 976.648804] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 976.648989] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Flavor pref 0:0:0 
{{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 976.649162] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 976.649314] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 976.649520] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 976.649682] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 976.649852] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 976.650026] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 976.650206] env[62619]: DEBUG nova.virt.hardware [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 976.651146] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a2d38e1-6269-452f-9999-a69128d38354 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.658994] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c77945b4-60e5-43ce-bf4c-ff154c84f8f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.672027] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:2e:a8:22', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '447ff42d-b33e-4b5d-8b7f-e8117ebbbc92', 'network-type': 'nsx.LogicalSwitch', 
'use-external-id': True}, 'iface_id': '8c7fa88a-7dbd-49cf-b490-e311fa9a804e', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 976.679134] env[62619]: DEBUG oslo.service.loopingcall [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 976.679363] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 976.679561] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8974213f-905a-4aed-8335-3a0016b7a347 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 976.697911] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 976.697911] env[62619]: value = "task-1365250" [ 976.697911] env[62619]: _type = "Task" [ 976.697911] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 976.707799] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365250, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 976.861219] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "f5560f62-634d-42e5-9354-68089db01e33" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 976.861458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f5560f62-634d-42e5-9354-68089db01e33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.950858] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.951255] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 976.979797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.212s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 976.980346] 
env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 976.982994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.568s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 976.984616] env[62619]: INFO nova.compute.claims [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 977.079943] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 977.079943] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-a8792d83-2f05-451f-883e-d24f0d1f6fd2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.086992] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 977.086992] env[62619]: value = "task-1365251" [ 977.086992] env[62619]: _type = "Task" [ 977.086992] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.094795] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365251, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.208656] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365250, 'name': CreateVM_Task, 'duration_secs': 0.28325} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.208850] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 977.209597] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.209832] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.210256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 977.210528] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e8eaa7c0-570e-4c65-aab5-19ed9c0bd93d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.215158] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 977.215158] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524d8324-2941-a552-af00-8027d1384bc6" [ 977.215158] env[62619]: _type = "Task" [ 977.215158] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.222047] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524d8324-2941-a552-af00-8027d1384bc6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.312694] env[62619]: DEBUG nova.compute.manager [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-changed-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 977.312694] env[62619]: DEBUG nova.compute.manager [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Refreshing instance network info cache due to event network-changed-8c7fa88a-7dbd-49cf-b490-e311fa9a804e. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 977.312812] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.312875] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.313193] env[62619]: DEBUG nova.network.neutron [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Refreshing network info cache for port 8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 977.364254] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 977.456731] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 977.457256] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 977.457256] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 977.488880] env[62619]: DEBUG nova.compute.utils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 977.492201] env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 977.492409] env[62619]: DEBUG nova.network.neutron [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 977.532725] env[62619]: DEBUG nova.policy [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9788d9778eb248149078c082538e4fa2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b6e92987102742d9b65b83850b6e5e7e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 977.599222] env[62619]: DEBUG oslo_vmware.api [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365251, 'name': PowerOnVM_Task, 'duration_secs': 0.377021} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 977.599502] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 977.599689] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-654ad8b7-140c-486a-a342-7c122bf5adb6 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance '04e1e5ff-6385-4c3d-a226-355a171f7de0' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 977.725346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 977.725591] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Processing image 9845866c-f4a3-4a1c-a044-56cf34137267 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 977.725830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.725982] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 977.726199] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 977.726448] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f364a3cd-1142-46e5-8927-b80f80a9732c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.743905] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 977.744229] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 977.745067] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a4edede-04bd-458a-9171-2cc057e0d0fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 977.750379] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 977.750379] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c368ca-f044-3e2c-807a-14fec5ccd897" [ 977.750379] env[62619]: _type = "Task" [ 977.750379] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 977.759708] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52c368ca-f044-3e2c-807a-14fec5ccd897, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 977.785101] env[62619]: DEBUG nova.network.neutron [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Successfully created port: 7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 977.886418] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 977.961204] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 977.961592] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Skipping network cache update for instance because it is Building. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9941}} [ 977.961592] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 977.998382] env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 978.001487] env[62619]: DEBUG nova.network.neutron [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updated VIF entry in instance network info cache for port 8c7fa88a-7dbd-49cf-b490-e311fa9a804e. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 978.001849] env[62619]: DEBUG nova.network.neutron [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 978.138877] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73375446-d09c-4591-98fb-e702f3befd4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.146403] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0258d62-2cbf-488c-9bab-670128c364c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.174912] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f651136-23a2-4b2c-8287-764267827342 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.181727] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4de72a3-8312-4750-9306-2cceb3bf28a6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.195609] env[62619]: DEBUG nova.compute.provider_tree [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 978.260457] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Preparing fetch location {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 978.260663] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None 
req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Fetch image to [datastore1] OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983/OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983.vmdk {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 978.260848] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Downloading stream optimized image 9845866c-f4a3-4a1c-a044-56cf34137267 to [datastore1] OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983/OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983.vmdk on the data store datastore1 as vApp {{(pid=62619) _fetch_image_as_vapp /opt/stack/nova/nova/virt/vmwareapi/vmops.py:437}} [ 978.261033] env[62619]: DEBUG nova.virt.vmwareapi.images [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Downloading image file data 9845866c-f4a3-4a1c-a044-56cf34137267 to the ESX as VM named 'OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983' {{(pid=62619) fetch_image_stream_optimized /opt/stack/nova/nova/virt/vmwareapi/images.py:336}} [ 978.329958] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating HttpNfcLease lease for vApp import into resource pool: (val){ [ 978.329958] env[62619]: value = "resgroup-9" [ 978.329958] env[62619]: _type = "ResourcePool" [ 978.329958] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:453}} [ 978.330197] env[62619]: DEBUG oslo_vmware.service [-] Invoking ResourcePool.ImportVApp with opID=oslo.vmware-63ff968a-1f5d-4603-a12c-1a374733a58f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 978.352020] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lease: (returnval){ [ 978.352020] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e41d0-c5d3-4c4e-b606-998c57f4d02a" [ 978.352020] env[62619]: _type = "HttpNfcLease" [ 978.352020] env[62619]: } obtained for vApp import into resource pool (val){ [ 978.352020] env[62619]: value = "resgroup-9" [ 978.352020] env[62619]: _type = "ResourcePool" [ 978.352020] env[62619]: }. {{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:461}} [ 978.352020] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the lease: (returnval){ [ 978.352020] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e41d0-c5d3-4c4e-b606-998c57f4d02a" [ 978.352020] env[62619]: _type = "HttpNfcLease" [ 978.352020] env[62619]: } to be ready. 
{{(pid=62619) wait_for_lease_ready /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:462}} [ 978.360737] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.360737] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e41d0-c5d3-4c4e-b606-998c57f4d02a" [ 978.360737] env[62619]: _type = "HttpNfcLease" [ 978.360737] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 978.508176] env[62619]: DEBUG oslo_concurrency.lockutils [req-ae587e2e-443e-45b3-90a9-1106b16dc881 req-9afc1fbc-5b80-4a19-b239-da3ae810ca26 service nova] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 978.508694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 978.508850] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 978.509000] env[62619]: DEBUG nova.objects.instance [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lazy-loading 'info_cache' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 978.698845] env[62619]: DEBUG nova.scheduler.client.report [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 978.860642] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 978.860642] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e41d0-c5d3-4c4e-b606-998c57f4d02a" [ 978.860642] env[62619]: _type = "HttpNfcLease" [ 978.860642] env[62619]: } is initializing. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:490}} [ 979.010308] env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 979.035920] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 979.036218] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 979.036381] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 979.036560] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 979.036709] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 979.036862] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 979.037083] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 979.037243] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 979.037410] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 979.037574] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 979.037746] env[62619]: DEBUG nova.virt.hardware [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 979.038648] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59c4b6a2-5193-44f4-8af2-33f23dee0388 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.047518] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e41741e7-1336-46b8-9950-88b6a84dce51 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.178072] env[62619]: DEBUG nova.compute.manager [req-8421554d-08e5-43bc-a1f6-c2055525b5d1 req-593f6b2b-72e7-4faf-95e4-c70317a96e8a service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received event network-vif-plugged-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 979.178304] env[62619]: DEBUG oslo_concurrency.lockutils [req-8421554d-08e5-43bc-a1f6-c2055525b5d1 req-593f6b2b-72e7-4faf-95e4-c70317a96e8a service nova] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 979.178520] env[62619]: DEBUG oslo_concurrency.lockutils [req-8421554d-08e5-43bc-a1f6-c2055525b5d1 req-593f6b2b-72e7-4faf-95e4-c70317a96e8a service nova] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.178692] env[62619]: DEBUG oslo_concurrency.lockutils [req-8421554d-08e5-43bc-a1f6-c2055525b5d1 req-593f6b2b-72e7-4faf-95e4-c70317a96e8a service nova] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.178864] env[62619]: DEBUG nova.compute.manager [req-8421554d-08e5-43bc-a1f6-c2055525b5d1 req-593f6b2b-72e7-4faf-95e4-c70317a96e8a service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] No waiting events found dispatching network-vif-plugged-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) pop_instance_event 
/opt/stack/nova/nova/compute/manager.py:320}} [ 979.179177] env[62619]: WARNING nova.compute.manager [req-8421554d-08e5-43bc-a1f6-c2055525b5d1 req-593f6b2b-72e7-4faf-95e4-c70317a96e8a service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received unexpected event network-vif-plugged-7c383235-8fc6-41ec-b559-12d1ff143a09 for instance with vm_state building and task_state spawning. [ 979.204215] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.221s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 979.204745] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 979.207625] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 6.512s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 979.209064] env[62619]: INFO nova.compute.claims [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 979.273896] env[62619]: DEBUG nova.network.neutron [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Successfully updated port: 7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 979.361630] env[62619]: DEBUG oslo_vmware.api [-] Lease: (returnval){ [ 979.361630] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e41d0-c5d3-4c4e-b606-998c57f4d02a" [ 979.361630] env[62619]: _type = "HttpNfcLease" [ 979.361630] env[62619]: } is ready. {{(pid=62619) _poll_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:487}} [ 979.361934] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Invoking VIM API for reading info of lease: (returnval){ [ 979.361934] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e41d0-c5d3-4c4e-b606-998c57f4d02a" [ 979.361934] env[62619]: _type = "HttpNfcLease" [ 979.361934] env[62619]: }. 
{{(pid=62619) _create_import_vapp_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:467}} [ 979.362838] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-923b2efc-2b34-4e1b-bc39-b16658806c05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.372236] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Found VMDK URL: https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521093cb-d837-00cc-af90-d37c918ad0a6/disk-0.vmdk from lease info. {{(pid=62619) _find_vmdk_url /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:531}} [ 979.372408] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating HTTP connection to write to file with size = 31666688 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521093cb-d837-00cc-af90-d37c918ad0a6/disk-0.vmdk. {{(pid=62619) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 979.438467] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseProgress with opID=oslo.vmware-9fa81ef3-b8f3-487f-9ce9-073a63c87ee4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 979.717069] env[62619]: DEBUG nova.compute.utils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 979.719282] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 979.719484] env[62619]: DEBUG nova.network.neutron [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 979.775574] env[62619]: DEBUG nova.policy [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f2b8765639f34e6c96d2fd07f351aa25', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '61cbb5aaa69e48b4a6c820b898bbe734', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 979.777713] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 979.777897] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 979.778120] env[62619]: DEBUG nova.network.neutron [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.104354] env[62619]: DEBUG nova.network.neutron [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Successfully created port: b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 980.224043] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 980.305142] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.339272] env[62619]: DEBUG nova.network.neutron [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 980.405369] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 980.405640] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 980.405864] env[62619]: DEBUG nova.compute.manager [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Going to confirm migration 2 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 980.432181] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7870259-42b0-4b2e-9e7e-c50f29ce50e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.444717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c71031e0-731a-4fbc-9d6b-ea54bfd25910 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.501575] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b3bb8fea-f361-44c1-bf88-a04afae7a45c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.510704] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d396650d-a40b-4349-b7a8-9b558f5d4d57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.530657] env[62619]: DEBUG nova.compute.provider_tree [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 980.560508] env[62619]: DEBUG nova.network.neutron [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", 
"version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 980.755869] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Completed reading data from the image iterator. {{(pid=62619) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 980.755869] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Getting lease state for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521093cb-d837-00cc-af90-d37c918ad0a6/disk-0.vmdk. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:423}} [ 980.756877] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4df03999-e201-47ab-8f86-512f8d5a8f6e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.763984] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521093cb-d837-00cc-af90-d37c918ad0a6/disk-0.vmdk is in state: ready. {{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:430}} [ 980.764259] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lease for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521093cb-d837-00cc-af90-d37c918ad0a6/disk-0.vmdk. 
{{(pid=62619) _release_lease /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:440}} [ 980.764564] env[62619]: DEBUG oslo_vmware.service [-] Invoking HttpNfcLease.HttpNfcLeaseComplete with opID=oslo.vmware-1c73206c-1d54-45ab-885d-a9225a425208 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 980.808275] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 980.808492] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 980.808689] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.808861] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.809013] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.809165] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.809307] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.809452] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.809572] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 980.809714] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 980.970618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 980.970819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 980.970998] env[62619]: DEBUG nova.network.neutron [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 980.971206] env[62619]: DEBUG nova.objects.instance [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lazy-loading 'info_cache' on Instance uuid 04e1e5ff-6385-4c3d-a226-355a171f7de0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.036855] env[62619]: DEBUG nova.scheduler.client.report [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 981.064755] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 981.065156] env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Instance network_info: |[{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": 
[{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 981.065576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:41:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'd69a4b11-8d65-435f-94a5-28f74a39a718', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7c383235-8fc6-41ec-b559-12d1ff143a09', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 981.072879] env[62619]: DEBUG oslo.service.loopingcall [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 981.073316] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 981.073544] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6e7858d3-55d2-4f42-868a-604bb1bf657f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.092359] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 981.092359] env[62619]: value = "task-1365253" [ 981.092359] env[62619]: _type = "Task" [ 981.092359] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.099650] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365253, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.208056] env[62619]: DEBUG nova.compute.manager [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 981.208350] env[62619]: DEBUG nova.compute.manager [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing instance network info cache due to event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 981.208614] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.208826] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.209072] env[62619]: DEBUG nova.network.neutron [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 981.235428] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 981.264024] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 981.264024] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 981.265036] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 981.265036] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 981.265222] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 981.265377] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 981.265588] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 981.265822] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 981.266024] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 981.266198] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 981.266379] env[62619]: DEBUG nova.virt.hardware [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 981.267295] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4df1cdc-1cb0-4a87-a9a3-47cc8de516af {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.275521] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb7d3ea6-aaa2-4306-bacd-d9217d43c5a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.291896] env[62619]: DEBUG oslo_vmware.rw_handles [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Closed VMDK write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap/nfc/521093cb-d837-00cc-af90-d37c918ad0a6/disk-0.vmdk. 
{{(pid=62619) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:646}} [ 981.292124] env[62619]: INFO nova.virt.vmwareapi.images [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Downloaded image file data 9845866c-f4a3-4a1c-a044-56cf34137267 [ 981.292871] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d476b8a-de11-4c63-8d46-efb5184642c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.307490] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-29a2fba9-fcad-47a0-bd02-17fc04efe4e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.311999] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 981.364235] env[62619]: INFO nova.virt.vmwareapi.images [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] The imported VM was unregistered [ 981.366906] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Caching image {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 981.367192] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Creating directory with path [datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 981.367484] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e814e99d-42a3-4c85-b746-55808c5a7af4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.383547] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Created directory with path [datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267 {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 981.383742] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Moving virtual disk from [datastore1] OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983/OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983.vmdk to [datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk. 
{{(pid=62619) disk_move /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:263}} [ 981.384012] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.MoveVirtualDisk_Task with opID=oslo.vmware-939f5456-8cac-45f9-b22e-3a12e6fdc45a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.390141] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 981.390141] env[62619]: value = "task-1365255" [ 981.390141] env[62619]: _type = "Task" [ 981.390141] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.397652] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.541934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.334s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 981.542839] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 981.545862] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 6.352s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 981.546104] env[62619]: DEBUG nova.objects.instance [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lazy-loading 'resources' on Instance uuid 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 981.602150] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365253, 'name': CreateVM_Task, 'duration_secs': 0.407396} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 981.602354] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 981.603165] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 981.603292] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 981.603629] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 981.603911] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d6ad882-42cb-4ea5-9352-3639da45dc23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 981.609052] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 981.609052] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523eacac-16ac-ec4c-5553-f51048d8a969" [ 981.609052] env[62619]: _type = "Task" [ 981.609052] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 981.617248] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523eacac-16ac-ec4c-5553-f51048d8a969, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.843299] env[62619]: DEBUG nova.network.neutron [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Successfully updated port: b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 981.900106] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task} progress is 18%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 981.975164] env[62619]: DEBUG nova.network.neutron [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updated VIF entry in instance network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 981.975626] env[62619]: DEBUG nova.network.neutron [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.049166] env[62619]: DEBUG nova.compute.utils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 982.054619] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 982.054818] env[62619]: DEBUG nova.network.neutron [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 982.121512] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]523eacac-16ac-ec4c-5553-f51048d8a969, 'name': SearchDatastore_Task, 'duration_secs': 0.091207} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.121887] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.122156] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 982.122414] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.122540] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.122722] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 982.123044] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ef864bd8-d8d2-43b8-8595-b6bad89d5040 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.131367] env[62619]: DEBUG nova.policy [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '27ce526a1d824fe8b6573fa80adcd53f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '33242a5e0a764cf3b8af687fc4302e8e', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 982.136211] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 
982.136417] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 982.137266] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-afb1af0d-5faa-4d66-b777-be56d1ff04b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.144961] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 982.144961] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527a012c-a3b3-b050-8d9e-4e96b2e488c7" [ 982.144961] env[62619]: _type = "Task" [ 982.144961] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.157125] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527a012c-a3b3-b050-8d9e-4e96b2e488c7, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.246111] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ac55a7b-e323-435f-b324-c084fb9c8a39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.253910] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2e7d22-1095-40a1-a5c3-efbc277b7e29 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.284646] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f42b1236-c15e-45a9-8999-03676a6d21b4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.288211] env[62619]: DEBUG nova.network.neutron [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance_info_cache with network_info: [{"id": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "address": "fa:16:3e:51:5c:ce", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc2f7c57e-c3", "ovs_interfaceid": "c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 982.294780] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c3330df-c00e-4973-a0c9-84d81431d58f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.311177] env[62619]: DEBUG nova.compute.provider_tree [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 982.345969] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 982.346149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 982.346293] env[62619]: DEBUG nova.network.neutron [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 982.400411] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task} progress is 38%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.479710] env[62619]: DEBUG oslo_concurrency.lockutils [req-6a296961-9ad0-45c4-912f-b83be4f8435a req-29e9aa80-d09d-4e25-8179-3c56ac76aa74 service nova] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.487669] env[62619]: DEBUG nova.network.neutron [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Successfully created port: b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 982.555755] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 982.655739] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527a012c-a3b3-b050-8d9e-4e96b2e488c7, 'name': SearchDatastore_Task, 'duration_secs': 0.097052} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 982.656492] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b90eea78-1977-4b2d-824e-83b67cba651b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 982.664254] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 982.664254] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d14ea8-401d-0a3a-0a7e-e9d2c14a773b" [ 982.664254] env[62619]: _type = "Task" [ 982.664254] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 982.670613] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d14ea8-401d-0a3a-0a7e-e9d2c14a773b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 982.791916] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-04e1e5ff-6385-4c3d-a226-355a171f7de0" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 982.792309] env[62619]: DEBUG nova.objects.instance [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lazy-loading 'migration_context' on Instance uuid 04e1e5ff-6385-4c3d-a226-355a171f7de0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 982.813876] env[62619]: DEBUG nova.scheduler.client.report [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 982.879398] env[62619]: DEBUG nova.network.neutron [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 982.905561] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task} progress is 60%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.024983] env[62619]: DEBUG nova.network.neutron [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.173078] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d14ea8-401d-0a3a-0a7e-e9d2c14a773b, 'name': SearchDatastore_Task, 'duration_secs': 0.095652} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.173376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.173644] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 983.173998] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e83c7c3c-fda8-48ef-bdbb-d416b902ec94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.182446] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 983.182446] env[62619]: value = "task-1365256" [ 983.182446] env[62619]: _type = "Task" [ 983.182446] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.190605] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365256, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.234910] env[62619]: DEBUG nova.compute.manager [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received event network-vif-plugged-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.234910] env[62619]: DEBUG oslo_concurrency.lockutils [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] Acquiring lock "69e916ee-9edc-4e1b-85a0-40142364e3bb-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.235056] env[62619]: DEBUG oslo_concurrency.lockutils [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.235237] env[62619]: DEBUG oslo_concurrency.lockutils [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.235409] env[62619]: DEBUG nova.compute.manager [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] No waiting events found dispatching network-vif-plugged-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.235576] env[62619]: WARNING nova.compute.manager [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received unexpected event network-vif-plugged-b8ca3257-e811-48cd-ac4a-662b49bf41f3 for instance with vm_state building and task_state spawning. [ 983.235739] env[62619]: DEBUG nova.compute.manager [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.235895] env[62619]: DEBUG nova.compute.manager [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing instance network info cache due to event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 983.236104] env[62619]: DEBUG oslo_concurrency.lockutils [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 983.295104] env[62619]: DEBUG nova.objects.base [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Object Instance<04e1e5ff-6385-4c3d-a226-355a171f7de0> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 983.296092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dca5bc9-bbdd-444b-9892-c5b5ec64a267 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.315658] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d80e3686-fae7-41c1-b8ea-6b8a3f322d6b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.318663] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.773s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.321113] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 5.435s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.322590] env[62619]: INFO nova.compute.claims [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 983.329543] env[62619]: DEBUG oslo_vmware.api [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 983.329543] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5276a706-566e-0b7c-4cc5-f5138787cc73" [ 983.329543] env[62619]: _type = "Task" [ 983.329543] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.338402] env[62619]: DEBUG oslo_vmware.api [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5276a706-566e-0b7c-4cc5-f5138787cc73, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.346558] env[62619]: INFO nova.scheduler.client.report [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Deleted allocations for instance 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e [ 983.401404] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task} progress is 80%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.530874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 983.530874] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Instance network_info: |[{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 983.531237] env[62619]: DEBUG oslo_concurrency.lockutils [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 983.531237] env[62619]: DEBUG nova.network.neutron [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 983.532676] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 
tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a6:f3:c6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4406a73e-2189-46ac-9e96-4f0af80b5094', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b8ca3257-e811-48cd-ac4a-662b49bf41f3', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 983.540362] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Creating folder: Project (61cbb5aaa69e48b4a6c820b898bbe734). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.543670] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-561051a7-5f05-422d-9b37-563a9c381f2c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.556371] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Created folder: Project (61cbb5aaa69e48b4a6c820b898bbe734) in parent group-v290436. [ 983.556638] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Creating folder: Instances. Parent ref: group-v290537. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 983.556862] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e7251902-380e-4255-844a-c484ac4d53fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.565660] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 983.569539] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Created folder: Instances in parent group-v290537. [ 983.569807] env[62619]: DEBUG oslo.service.loopingcall [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 983.570291] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 983.570291] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-292ee868-5708-4027-aee5-dc766241c7bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.594419] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 983.594419] env[62619]: value = "task-1365259" [ 983.594419] env[62619]: _type = "Task" [ 983.594419] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 983.602210] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 983.602568] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 983.602656] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 983.602878] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 983.603049] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 983.603280] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 983.603557] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 983.603824] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 983.604113] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 983.604385] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 983.604578] env[62619]: DEBUG nova.virt.hardware [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 983.605561] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-416add8d-7407-4973-b5ca-abb8b528c6df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.614469] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365259, 'name': CreateVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.618062] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b9f2799-de62-4189-904e-39529780722b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 983.695801] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365256, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.810746] env[62619]: DEBUG nova.network.neutron [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updated VIF entry in instance network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 983.811200] env[62619]: DEBUG nova.network.neutron [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 983.843209] env[62619]: DEBUG oslo_vmware.api [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5276a706-566e-0b7c-4cc5-f5138787cc73, 'name': SearchDatastore_Task, 'duration_secs': 0.044635} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 983.843509] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.855130] env[62619]: DEBUG oslo_concurrency.lockutils [None req-bba0771c-0bc3-4bb2-b158-e3755beb9cd3 tempest-ImagesTestJSON-13558914 tempest-ImagesTestJSON-13558914-project-member] Lock "1cbb03c7-5b97-4c3b-aee0-5fa948027a4e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 11.526s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.902479] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 983.968442] env[62619]: DEBUG nova.compute.manager [req-a071b592-268e-46b4-a2fa-adb103349748 req-e3191c6e-82b4-472c-8c66-936960cb9b66 service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Received event network-vif-plugged-b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 983.968681] env[62619]: DEBUG oslo_concurrency.lockutils [req-a071b592-268e-46b4-a2fa-adb103349748 req-e3191c6e-82b4-472c-8c66-936960cb9b66 service nova] Acquiring lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 983.968911] env[62619]: DEBUG oslo_concurrency.lockutils [req-a071b592-268e-46b4-a2fa-adb103349748 req-e3191c6e-82b4-472c-8c66-936960cb9b66 service nova] Lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 983.969114] env[62619]: DEBUG oslo_concurrency.lockutils [req-a071b592-268e-46b4-a2fa-adb103349748 req-e3191c6e-82b4-472c-8c66-936960cb9b66 service nova] Lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 983.969293] env[62619]: DEBUG nova.compute.manager [req-a071b592-268e-46b4-a2fa-adb103349748 req-e3191c6e-82b4-472c-8c66-936960cb9b66 service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] No waiting events found dispatching network-vif-plugged-b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 983.969459] env[62619]: WARNING nova.compute.manager [req-a071b592-268e-46b4-a2fa-adb103349748 req-e3191c6e-82b4-472c-8c66-936960cb9b66 service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Received unexpected event network-vif-plugged-b2d9c045-8c75-4e70-a8de-8f4587d036a1 for instance with vm_state building and task_state spawning. [ 984.104676] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365259, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.195196] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365256, 'name': CopyVirtualDisk_Task} progress is 51%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.314608] env[62619]: DEBUG oslo_concurrency.lockutils [req-e891a627-4a7b-45e4-a28b-320d8d3780f8 req-b37a738f-ed86-4d1b-8b34-fce833bdccf2 service nova] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.405367] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365255, 'name': MoveVirtualDisk_Task, 'duration_secs': 2.550678} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.406024] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Moved virtual disk from [datastore1] OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983/OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983.vmdk to [datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk. [ 984.406024] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Cleaning up location [datastore1] OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:640}} [ 984.406578] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleting the datastore file [datastore1] OSTACK_IMG_924ea522-351b-4a2e-9ee2-f6d766e38983 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 984.406578] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f0d85eb9-b9e5-4afd-8c33-eafda1208d56 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.413962] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 984.413962] env[62619]: value = "task-1365260" [ 984.413962] env[62619]: _type = "Task" [ 984.413962] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.421518] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365260, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.488104] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56a3112a-3843-4743-8680-8786e84cb169 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.496226] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc7fbd77-f6a2-43bf-a43f-319d50d7aeb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.531267] env[62619]: DEBUG nova.network.neutron [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Successfully updated port: b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 984.532829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf343437-1d0e-4fe3-a239-cc82ead8499a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.539754] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3f60707-12c2-4edd-8f28-e97908b00334 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.554605] env[62619]: DEBUG nova.compute.provider_tree [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 984.604943] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365259, 'name': CreateVM_Task, 'duration_secs': 0.761461} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.605264] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 984.606017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 984.606271] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 984.606712] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 984.607060] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22dee9d8-7e58-4a08-b247-3a6deb6580b2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.611749] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 984.611749] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5246de3a-911c-ed02-e506-f3b09534ac9f" [ 984.611749] env[62619]: _type = "Task" [ 984.611749] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.620907] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5246de3a-911c-ed02-e506-f3b09534ac9f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.694867] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365256, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.166063} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.695233] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 984.695599] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 984.695877] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-e0401e19-0b48-4c42-b5b6-1dbd2cdfd625 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.702699] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 984.702699] env[62619]: value = "task-1365261" [ 984.702699] env[62619]: _type = "Task" [ 984.702699] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.711595] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365261, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 984.924341] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365260, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.034182} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 984.924341] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 984.924781] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "[datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 984.924781] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk to [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 984.925736] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5ebd805a-44d9-4b19-b47d-cc35b7cb9094 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 984.931656] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 984.931656] env[62619]: value = "task-1365262" [ 984.931656] env[62619]: _type = "Task" [ 984.931656] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 984.940957] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.032572] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.032719] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.032934] env[62619]: DEBUG nova.network.neutron [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 985.076863] env[62619]: ERROR nova.scheduler.client.report [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [req-023d9746-d5ff-4573-8bc6-4044297db4e7] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-023d9746-d5ff-4573-8bc6-4044297db4e7"}]} [ 985.094438] env[62619]: DEBUG nova.scheduler.client.report [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 985.107336] env[62619]: DEBUG nova.scheduler.client.report [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 985.107571] env[62619]: DEBUG nova.compute.provider_tree [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.124097] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5246de3a-911c-ed02-e506-f3b09534ac9f, 'name': SearchDatastore_Task, 'duration_secs': 0.009989} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.125205] env[62619]: DEBUG nova.scheduler.client.report [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 985.127301] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 985.127551] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 985.127786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.127935] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 985.128159] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 985.128693] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-39146396-1bae-4fe3-8da1-d9476037218d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.146020] env[62619]: DEBUG nova.scheduler.client.report [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 985.149761] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 
tempest-AttachInterfacesUnderV243Test-161764308-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 985.149913] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 985.150616] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f143aaa-334c-4193-b3f0-ec58f7bb89a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.156383] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 985.156383] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dd3be9-4596-ad2e-a54c-6331dd0f86a4" [ 985.156383] env[62619]: _type = "Task" [ 985.156383] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.168482] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dd3be9-4596-ad2e-a54c-6331dd0f86a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.213118] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365261, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.180592} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.216139] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 985.217417] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f89eb407-69b0-42f7-a943-99eb5ac2f767 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.249400] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 985.252096] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-0475968c-8db6-4372-bf46-75c6cf8748c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.270630] env[62619]: DEBUG nova.compute.manager [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Received event network-changed-b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 985.270630] env[62619]: DEBUG nova.compute.manager [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Refreshing instance network info cache due to event network-changed-b2d9c045-8c75-4e70-a8de-8f4587d036a1. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 985.270630] env[62619]: DEBUG oslo_concurrency.lockutils [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] Acquiring lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 985.278946] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 985.278946] env[62619]: value = "task-1365263" [ 985.278946] env[62619]: _type = "Task" [ 985.278946] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.290453] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365263, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.350114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0de92b3-7137-4dec-a21c-3105e0ca5fa5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.361614] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1567fde-4755-4ef1-8d93-62892a1d5f55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.398561] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6d422c5-0202-4d9a-80f9-e3d42001a8cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.409086] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88ddde5e-fa53-496a-8e65-5f2d9048ba55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.428875] env[62619]: DEBUG nova.compute.provider_tree [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 985.447563] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': CopyVirtualDisk_Task} progress is 18%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.580099] env[62619]: DEBUG nova.network.neutron [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 985.673355] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dd3be9-4596-ad2e-a54c-6331dd0f86a4, 'name': SearchDatastore_Task, 'duration_secs': 0.063732} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 985.674369] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b87b42f9-2936-4d1e-a644-1760f1a0da2d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 985.686858] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 985.686858] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d51a7f-c329-281f-9645-7a017bc965d3" [ 985.686858] env[62619]: _type = "Task" [ 985.686858] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 985.704411] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d51a7f-c329-281f-9645-7a017bc965d3, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.791622] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.792897] env[62619]: DEBUG nova.network.neutron [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance_info_cache with network_info: [{"id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "address": "fa:16:3e:6f:73:55", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d9c045-8c", "ovs_interfaceid": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 985.949357] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': 
CopyVirtualDisk_Task} progress is 40%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 985.970801] env[62619]: DEBUG nova.scheduler.client.report [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 120 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 985.971828] env[62619]: DEBUG nova.compute.provider_tree [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 120 to 121 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 985.971828] env[62619]: DEBUG nova.compute.provider_tree [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 986.205706] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d51a7f-c329-281f-9645-7a017bc965d3, 'name': SearchDatastore_Task, 'duration_secs': 0.083529} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 986.206246] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.206723] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 69e916ee-9edc-4e1b-85a0-40142364e3bb/69e916ee-9edc-4e1b-85a0-40142364e3bb.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 986.207157] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bf1345c1-2c7c-4a37-8949-66d90c8b9f33 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.218513] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 986.218513] env[62619]: value = "task-1365264" [ 986.218513] env[62619]: _type = "Task" [ 986.218513] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.229316] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.292022] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365263, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.296023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 986.296160] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Instance network_info: |[{"id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "address": "fa:16:3e:6f:73:55", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d9c045-8c", "ovs_interfaceid": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 986.296542] env[62619]: DEBUG oslo_concurrency.lockutils [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] Acquired lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 986.296793] env[62619]: DEBUG nova.network.neutron [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Refreshing network info cache for port b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 986.298258] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:73:55', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '57c65f87-60fd-4882-ab30-31db49131b46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b2d9c045-8c75-4e70-a8de-8f4587d036a1', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 986.307111] env[62619]: DEBUG oslo.service.loopingcall [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 
tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 986.308350] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 986.308936] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-fe1dbe1a-bce5-4874-a4d2-37cab87c79be {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.334401] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 986.334401] env[62619]: value = "task-1365265" [ 986.334401] env[62619]: _type = "Task" [ 986.334401] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 986.342686] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.451940] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': CopyVirtualDisk_Task} progress is 57%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.477608] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.156s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.478224] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 986.482149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 5.169s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.482149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 986.482149] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 986.482149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 2.638s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.484199] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28fa8987-9e5c-4d77-ab93-03295d060a25 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.496103] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cce30f49-b81b-4b46-8d8d-a632ded321ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.513848] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4d7e152-465c-46d0-930a-86fb13d0e185 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.523746] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d717a98e-06ab-4e00-a2a3-c4a8d251d467 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 986.558396] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180243MB free_disk=148GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 986.558396] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.731376] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: 
{'id': task-1365264, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.780346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 986.781411] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.004s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 986.795168] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.855294] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.950646] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': CopyVirtualDisk_Task} progress is 77%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 986.985275] env[62619]: DEBUG nova.compute.utils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 986.986718] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 986.987412] env[62619]: DEBUG nova.network.neutron [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 987.111126] env[62619]: DEBUG nova.policy [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 987.176283] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d88cab8b-cda4-4313-92b8-7836f4f88f3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.190522] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4cb940-0dff-4fe9-b796-f812e71e93f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.233144] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-290551db-17b4-4045-96e8-84cf3abb2fd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.247252] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4711ac71-ed49-46a4-88e5-a2b5e4ab425d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.254843] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365264, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.267424] env[62619]: DEBUG nova.compute.provider_tree [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 987.286025] env[62619]: DEBUG nova.compute.utils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 987.291246] env[62619]: DEBUG nova.network.neutron [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updated VIF entry in instance network info cache for port b2d9c045-8c75-4e70-a8de-8f4587d036a1. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 987.291496] env[62619]: DEBUG nova.network.neutron [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance_info_cache with network_info: [{"id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "address": "fa:16:3e:6f:73:55", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d9c045-8c", "ovs_interfaceid": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 987.303321] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365263, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.348826] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.452369] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': CopyVirtualDisk_Task} progress is 97%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.494238] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 987.666381] env[62619]: DEBUG nova.network.neutron [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Successfully created port: e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 987.741490] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365264, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.770761] env[62619]: DEBUG nova.scheduler.client.report [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 987.797790] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.016s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 987.802146] env[62619]: DEBUG oslo_concurrency.lockutils [req-b75317c6-2f5f-46e3-bd17-a8b7f086412a req-194a6995-ede9-4918-897c-cb0616bc62fc service nova] Releasing lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 987.802607] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365263, 'name': ReconfigVM_Task, 'duration_secs': 2.367175} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.804865] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 987.805891] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-21be67e0-6ce4-478c-a89a-9ee1bd3e07ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.814812] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 987.814812] env[62619]: value = "task-1365266" [ 987.814812] env[62619]: _type = "Task" [ 987.814812] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 987.830279] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365266, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.846308] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 987.950126] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365262, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.562001} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 987.950515] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/9845866c-f4a3-4a1c-a044-56cf34137267/9845866c-f4a3-4a1c-a044-56cf34137267.vmdk to [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 987.951415] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f98a7da-c2e8-4779-a86b-c8b2692de5e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.973800] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Reconfiguring VM instance instance-0000004b to attach disk [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 987.974171] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f3c4277f-99ad-4d21-9427-7eaaa921f44b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 987.993332] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 987.993332] env[62619]: value = "task-1365267" [ 987.993332] env[62619]: _type = "Task" [ 987.993332] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.004147] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365267, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.241159] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365264, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.766079} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.241466] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 69e916ee-9edc-4e1b-85a0-40142364e3bb/69e916ee-9edc-4e1b-85a0-40142364e3bb.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 988.241717] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 988.241892] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-615c452e-17fc-4955-99b7-ff1e86fd5f23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.248376] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 988.248376] env[62619]: value = "task-1365268" [ 988.248376] env[62619]: _type = "Task" [ 988.248376] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.258218] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365268, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.326601] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365266, 'name': Rename_Task, 'duration_secs': 0.254221} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.327158] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 988.327302] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-61225b4f-3494-42e8-a599-821370a1c7b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.337335] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 988.337335] env[62619]: value = "task-1365269" [ 988.337335] env[62619]: _type = "Task" [ 988.337335] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.352496] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.357571] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.506410] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 988.509608] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365267, 'name': ReconfigVM_Task, 'duration_secs': 0.363387} completed successfully. 
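The Rename_Task, PowerOnVM_Task and CreateVM_Task records above all follow the same pattern: the driver submits a vCenter task, then oslo.vmware's wait_for_task blocks while _poll_task logs periodic "progress is N%" updates until the task completes or fails. The sketch below only illustrates that poll-until-done loop under assumed names; get_task_info and the state strings are hypothetical stand-ins, not oslo.vmware's real property-collector calls.

    import time

    # Hypothetical stand-ins for vSphere task states; the real driver reads
    # TaskInfo.state through the vCenter property collector.
    RUNNING, SUCCESS, ERROR = "running", "success", "error"

    def wait_for_task(get_task_info, poll_interval=0.5):
        # Poll until the task reports a terminal state, mirroring the
        # "progress is N%" / "completed successfully" lines in the trace.
        while True:
            info = get_task_info()  # e.g. {"state": "running", "progress": 25}
            if info["state"] == SUCCESS:
                return info
            if info["state"] == ERROR:
                raise RuntimeError("task failed: %s" % info)
            print("progress is %s%%" % info.get("progress", 0))
            time.sleep(poll_interval)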
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.510109] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Reconfigured VM instance instance-0000004b to attach disk [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a/3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a.vmdk or device None with type streamOptimized {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 988.510823] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7caa9318-06c4-4739-864e-d483d45fec9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.517111] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 988.517111] env[62619]: value = "task-1365270" [ 988.517111] env[62619]: _type = "Task" [ 988.517111] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.526347] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365270, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.545559] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 988.545720] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 988.545890] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 988.550210] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor 
pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 988.550435] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 988.550598] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 988.550835] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 988.551427] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 988.551427] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 988.551427] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 988.551585] env[62619]: DEBUG nova.virt.hardware [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 988.553768] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f400a586-840c-463b-82a2-1ed3942e8fa5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.562627] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbd02564-dd6b-450d-ad3f-29de272cfda0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.761286] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365268, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.071202} completed successfully. 
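The nova.virt.hardware records above narrate how nova picks a guest CPU topology for the 1-vCPU m1.nano flavor: with no flavor or image limits, every (sockets, cores, threads) factorisation of the vCPU count is a candidate, and for one vCPU the only candidate is 1:1:1, which is exactly what "Got 1 possible topologies" reports. The snippet below is a simplified re-creation of that enumeration under assumed defaults, not nova's _get_possible_cpu_topologies itself.

    from itertools import product

    def possible_topologies(vcpus, max_sockets=65536, max_cores=65536, max_threads=65536):
        # Simplified illustration: list every (sockets, cores, threads) triple
        # whose product equals the vCPU count, within the given maximums.
        found = []
        for s, c, t in product(range(1, min(vcpus, max_sockets) + 1),
                               range(1, min(vcpus, max_cores) + 1),
                               range(1, min(vcpus, max_threads) + 1)):
            if s * c * t == vcpus:
                found.append((s, c, t))
        return found

    # For the 1-vCPU flavor in the log this yields a single topology, 1:1:1.
    assert possible_topologies(1) == [(1, 1, 1)]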
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 988.761988] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 988.762395] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68db5201-6b7a-4bf2-abf1-647c61d71577 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.849174] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Reconfiguring VM instance instance-0000005f to attach disk [datastore1] 69e916ee-9edc-4e1b-85a0-40142364e3bb/69e916ee-9edc-4e1b-85a0-40142364e3bb.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 988.849174] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.305s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 988.849174] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-03551c3f-bbdd-4b63-b87a-dfbaf1143abf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.849174] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 2.246s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.849174] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 988.849174] env[62619]: value = "task-1365271" [ 988.849174] env[62619]: _type = "Task" [ 988.849174] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 988.849174] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365271, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.853511] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.860122] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 988.909346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 988.909797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 988.909915] env[62619]: INFO nova.compute.manager [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Attaching volume f9a35717-66fa-4d29-9e27-0e5ac0f81b7e to /dev/sdb [ 988.954015] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-612dc7df-4a6c-4ee9-bd46-b0896b63dc72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.961263] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93fe2980-fec3-48c3-8546-a1dd4e5cdc2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 988.973656] env[62619]: DEBUG nova.virt.block_device [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating existing volume attachment record: e42ed616-f689-4892-95ee-1bc39b69421c {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 989.025918] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365270, 'name': Rename_Task, 'duration_secs': 0.150431} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.026240] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.026492] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-cda14e88-3820-4f8e-a388-a42e3de742d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.032232] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 989.032232] env[62619]: value = "task-1365272" [ 989.032232] env[62619]: _type = "Task" [ 989.032232] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.039821] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365272, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.304835] env[62619]: DEBUG nova.compute.manager [req-1105c383-bf7b-4bab-8688-3c8e6002f363 req-54518797-3b54-437f-8840-4dc51f9f36ba service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Received event network-vif-plugged-e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 989.305103] env[62619]: DEBUG oslo_concurrency.lockutils [req-1105c383-bf7b-4bab-8688-3c8e6002f363 req-54518797-3b54-437f-8840-4dc51f9f36ba service nova] Acquiring lock "f5560f62-634d-42e5-9354-68089db01e33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.305418] env[62619]: DEBUG oslo_concurrency.lockutils [req-1105c383-bf7b-4bab-8688-3c8e6002f363 req-54518797-3b54-437f-8840-4dc51f9f36ba service nova] Lock "f5560f62-634d-42e5-9354-68089db01e33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.305629] env[62619]: DEBUG oslo_concurrency.lockutils [req-1105c383-bf7b-4bab-8688-3c8e6002f363 req-54518797-3b54-437f-8840-4dc51f9f36ba service nova] Lock "f5560f62-634d-42e5-9354-68089db01e33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.305829] env[62619]: DEBUG nova.compute.manager [req-1105c383-bf7b-4bab-8688-3c8e6002f363 req-54518797-3b54-437f-8840-4dc51f9f36ba service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] No waiting events found dispatching network-vif-plugged-e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 989.306729] env[62619]: WARNING nova.compute.manager 
[req-1105c383-bf7b-4bab-8688-3c8e6002f363 req-54518797-3b54-437f-8840-4dc51f9f36ba service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Received unexpected event network-vif-plugged-e905f2dd-5836-45a9-b32b-497591e07391 for instance with vm_state building and task_state spawning. [ 989.328219] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365271, 'name': ReconfigVM_Task, 'duration_secs': 0.288653} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.328219] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Reconfigured VM instance instance-0000005f to attach disk [datastore1] 69e916ee-9edc-4e1b-85a0-40142364e3bb/69e916ee-9edc-4e1b-85a0-40142364e3bb.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 989.328620] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-215f0862-8c71-4943-ab1b-dde01ae0c8ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.335361] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 989.335361] env[62619]: value = "task-1365276" [ 989.335361] env[62619]: _type = "Task" [ 989.335361] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.352385] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365276, 'name': Rename_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.356121] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.360378] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.385675] env[62619]: INFO nova.scheduler.client.report [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocation for migration 9cd4bbc0-93f7-428b-889f-3b09903f6ab4 [ 989.419768] env[62619]: DEBUG nova.network.neutron [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Successfully updated port: e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 989.543781] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365272, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.840285] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 9f7d7830-b878-41b9-a236-f7cd5580cf1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.840440] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 02dec4f2-cbe7-4bb0-a57e-3970c5669354 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.840562] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.840678] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 74aa214a-7eda-4613-a394-bc7477d3078e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.840817] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 04e1e5ff-6385-4c3d-a226-355a171f7de0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 256, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.840936] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.841078] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.841211] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 69e916ee-9edc-4e1b-85a0-40142364e3bb actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.842340] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 1c406554-d91d-422a-9a5a-9e910fc51103 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.842340] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance f5560f62-634d-42e5-9354-68089db01e33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 989.850992] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365276, 'name': Rename_Task, 'duration_secs': 0.13332} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 989.854780] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 989.855171] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-7fe7e4cb-f103-47f0-bb0a-a37168090438 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.860967] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.866171] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.867269] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 989.867269] env[62619]: value = "task-1365277" [ 989.867269] env[62619]: _type = "Task" [ 989.867269] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.875278] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365277, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 989.883407] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.892181] env[62619]: DEBUG oslo_concurrency.lockutils [None req-df641f87-1e79-44e2-bda3-03e32a4ae86b tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 9.486s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.893403] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.011s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.893504] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 989.895901] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 989.895901] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 989.895901] env[62619]: INFO nova.compute.manager [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Terminating instance [ 989.897265] env[62619]: DEBUG nova.compute.manager [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 989.897369] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 989.900459] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285223ce-b98a-4727-a1cb-337199d2058e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.907806] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 989.908087] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-03e4717f-875f-4642-9141-f5d9c332939c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 989.914494] env[62619]: DEBUG oslo_vmware.api [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 989.914494] env[62619]: value = "task-1365278" [ 989.914494] env[62619]: _type = "Task" [ 989.914494] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 989.924648] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-f5560f62-634d-42e5-9354-68089db01e33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 989.924648] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-f5560f62-634d-42e5-9354-68089db01e33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 989.924648] env[62619]: DEBUG nova.network.neutron [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 989.925598] env[62619]: DEBUG oslo_vmware.api [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365278, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.042834] env[62619]: DEBUG oslo_vmware.api [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365272, 'name': PowerOnVM_Task, 'duration_secs': 0.546046} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.044441] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 990.050131] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "48f193f6-9928-4098-8830-dadda0eb11e6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.050264] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "48f193f6-9928-4098-8830-dadda0eb11e6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.149172] env[62619]: DEBUG nova.compute.manager [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 990.150606] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8b71194-f2b5-48b7-969e-8833c79fbc76 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.351847] env[62619]: INFO nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 48f193f6-9928-4098-8830-dadda0eb11e6 has allocations against this compute host but is not found in the database. [ 990.352147] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 990.352257] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=2496MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 990.354795] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.362749] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task} progress is 66%. 
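The "Final resource view" figures above line up with the instance list the resource tracker just walked: nine of the ten tracked instances carry 192 MB flavors and one (04e1e5ff-6385-4c3d-a226-355a171f7de0) carries 256 MB, so instance memory totals 9 × 192 + 256 = 1984 MB; adding the 512 MB of reserved host memory reported in the inventory below ('MEMORY_MB': {... 'reserved': 512 ...}) gives the 2496 MB shown as used_ram. Each instance likewise claims 1 GB of root disk and one vCPU, matching used_disk=10GB and used_vcpus=10.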
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.383018] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365277, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.422708] env[62619]: DEBUG oslo_vmware.api [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365278, 'name': PowerOffVM_Task, 'duration_secs': 0.232774} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.424935] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 990.425132] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 990.425591] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a42f9118-fcc9-4f37-ad3f-79bd155dccfa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.467520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 990.467919] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 990.471674] env[62619]: DEBUG nova.network.neutron [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 990.492433] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 990.493041] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 990.493299] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleting the datastore file [datastore1] 04e1e5ff-6385-4c3d-a226-355a171f7de0 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 990.496402] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2cec7245-4803-41a3-ba10-409bc85dbd24 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.508537] env[62619]: DEBUG oslo_vmware.api [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 990.508537] env[62619]: value = "task-1365280" [ 990.508537] env[62619]: _type = "Task" [ 990.508537] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.519535] env[62619]: DEBUG oslo_vmware.api [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365280, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.544383] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7f9be22-321c-4f58-ac31-17e017fa01f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.553017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba05cb8-c128-4c1c-b97e-d34153332758 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.556575] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 990.592924] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6e25d0-5bdc-44b0-bd9e-1716b2e1f462 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.600859] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec49ccd4-9eac-4924-9c40-d7b7c09800e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.614899] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 990.673744] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9228452a-f20b-4be5-9a20-0d0aa104d88b tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" "released" by "nova.compute.manager.ComputeManager.unshelve_instance..do_unshelve_instance" :: held 23.371s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 990.680853] env[62619]: DEBUG nova.network.neutron [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Updating instance_info_cache with network_info: [{"id": "e905f2dd-5836-45a9-b32b-497591e07391", "address": "fa:16:3e:5d:b3:7f", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape905f2dd-58", "ovs_interfaceid": "e905f2dd-5836-45a9-b32b-497591e07391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 990.853060] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365265, 'name': CreateVM_Task, 'duration_secs': 4.441133} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 990.853060] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 990.853532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 990.853705] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 990.854032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 990.854297] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a12e09e7-a43b-402a-8836-e1f0507c6193 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 990.865938] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 990.865938] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dff7ce-9165-38c8-d05e-16dd834835e0" [ 990.865938] env[62619]: _type = "Task" [ 990.865938] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 990.868427] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.877092] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dff7ce-9165-38c8-d05e-16dd834835e0, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.882027] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365277, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 990.974343] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 991.024841] env[62619]: DEBUG oslo_vmware.api [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365280, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15903} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.026171] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 991.026494] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 991.026794] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 991.027090] env[62619]: INFO nova.compute.manager [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Took 1.13 seconds to destroy the instance on the hypervisor. [ 991.027525] env[62619]: DEBUG oslo.service.loopingcall [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
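The "Waiting for function … _deallocate_network_with_retries to return" record just above comes from oslo.service's looping-call module (loopingcall.py), whose helpers re-invoke or wait on a function until it finishes. Below is a hedged sketch of that general pattern using FixedIntervalLoopingCall; it illustrates the mechanism only and is not necessarily the exact looping-call variant nova uses for network deallocation retries.

    from oslo_service import loopingcall

    attempts = {"count": 0}

    def _retry_body():
        # Made-up retry body: pretend the third attempt succeeds and stop
        # the loop, handing a return value back to .wait().
        attempts["count"] += 1
        if attempts["count"] >= 3:
            raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_retry_body)
    result = timer.start(interval=0.1).wait()  # blocks until LoopingCallDone
    print(result)  # True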
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.028198] env[62619]: DEBUG nova.compute.manager [-] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 991.028343] env[62619]: DEBUG nova.network.neutron [-] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 991.078789] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.119504] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 991.182526] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-f5560f62-634d-42e5-9354-68089db01e33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.182880] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Instance network_info: |[{"id": "e905f2dd-5836-45a9-b32b-497591e07391", "address": "fa:16:3e:5d:b3:7f", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape905f2dd-58", "ovs_interfaceid": "e905f2dd-5836-45a9-b32b-497591e07391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 991.183378] env[62619]: DEBUG 
nova.virt.vmwareapi.vmops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:b3:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e905f2dd-5836-45a9-b32b-497591e07391', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 991.192227] env[62619]: DEBUG oslo.service.loopingcall [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 991.192503] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5560f62-634d-42e5-9354-68089db01e33] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 991.192744] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-6d73385e-b403-43e0-9d08-45fe060a1e83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.212540] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 991.212540] env[62619]: value = "task-1365281" [ 991.212540] env[62619]: _type = "Task" [ 991.212540] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.221765] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365281, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.331034] env[62619]: DEBUG nova.compute.manager [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Received event network-changed-e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 991.331034] env[62619]: DEBUG nova.compute.manager [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Refreshing instance network info cache due to event network-changed-e905f2dd-5836-45a9-b32b-497591e07391. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 991.331034] env[62619]: DEBUG oslo_concurrency.lockutils [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] Acquiring lock "refresh_cache-f5560f62-634d-42e5-9354-68089db01e33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.331221] env[62619]: DEBUG oslo_concurrency.lockutils [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] Acquired lock "refresh_cache-f5560f62-634d-42e5-9354-68089db01e33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.331389] env[62619]: DEBUG nova.network.neutron [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Refreshing network info cache for port e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 991.365533] env[62619]: DEBUG oslo_vmware.api [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365269, 'name': PowerOnVM_Task, 'duration_secs': 2.866505} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.365826] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.366069] env[62619]: INFO nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Took 12.36 seconds to spawn the instance on the hypervisor. [ 991.366665] env[62619]: DEBUG nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.367117] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bf2bd8ac-99e2-45bb-b6ce-47d0e9119c35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.384814] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dff7ce-9165-38c8-d05e-16dd834835e0, 'name': SearchDatastore_Task, 'duration_secs': 0.018481} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.385380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 991.385624] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 991.385859] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.386049] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.386199] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 991.389147] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f88b5ce-84eb-4835-b6be-d727fedf4a88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.390913] env[62619]: DEBUG oslo_vmware.api [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365277, 'name': PowerOnVM_Task, 'duration_secs': 1.338272} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.391178] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 991.391380] env[62619]: INFO nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Took 10.16 seconds to spawn the instance on the hypervisor. 
[ 991.391560] env[62619]: DEBUG nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 991.392625] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee897d07-8dce-4845-bfbc-5b7f7fab082a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.409014] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 991.409014] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 991.409014] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4a224e78-e2bc-4c4c-b9ee-b637ffe2318c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.411811] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 991.411811] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520ba5ba-4bbd-219b-701e-56b8e8ee79e2" [ 991.411811] env[62619]: _type = "Task" [ 991.411811] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.421713] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520ba5ba-4bbd-219b-701e-56b8e8ee79e2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.497804] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 991.624036] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 991.624036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.820s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 991.624505] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.546s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 991.626025] env[62619]: INFO nova.compute.claims [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 991.722046] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365281, 'name': CreateVM_Task, 'duration_secs': 0.363381} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.722046] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f5560f62-634d-42e5-9354-68089db01e33] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 991.722493] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 991.722668] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 991.723022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 991.723286] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-976bfa6f-b391-45b6-a102-f159e0bbc2bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.727401] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 991.727401] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ac626c-bef0-5ec5-8999-d96e99e2a547" [ 991.727401] env[62619]: _type = "Task" [ 991.727401] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.734810] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ac626c-bef0-5ec5-8999-d96e99e2a547, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 991.835956] env[62619]: DEBUG nova.network.neutron [-] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 991.892626] env[62619]: INFO nova.compute.manager [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Took 22.30 seconds to build instance. [ 991.910595] env[62619]: INFO nova.compute.manager [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Took 21.51 seconds to build instance. 
[ 991.925332] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520ba5ba-4bbd-219b-701e-56b8e8ee79e2, 'name': SearchDatastore_Task, 'duration_secs': 0.011187} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 991.926785] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-490852f9-8610-4bf2-8e9d-5c522c94c757 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 991.932458] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 991.932458] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524365dc-c075-13aa-a359-4144ff142033" [ 991.932458] env[62619]: _type = "Task" [ 991.932458] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 991.942960] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524365dc-c075-13aa-a359-4144ff142033, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.058312] env[62619]: DEBUG nova.network.neutron [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Updated VIF entry in instance network info cache for port e905f2dd-5836-45a9-b32b-497591e07391. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 992.058701] env[62619]: DEBUG nova.network.neutron [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Updating instance_info_cache with network_info: [{"id": "e905f2dd-5836-45a9-b32b-497591e07391", "address": "fa:16:3e:5d:b3:7f", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape905f2dd-58", "ovs_interfaceid": "e905f2dd-5836-45a9-b32b-497591e07391", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 992.087079] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cb01d8-a050-45ae-906d-c86a9063d818 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.093937] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Suspending the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1162}} [ 992.094229] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.SuspendVM_Task with opID=oslo.vmware-8a393376-326f-4301-bc02-af472c1b8319 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.102102] env[62619]: DEBUG oslo_vmware.api [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 992.102102] env[62619]: value = "task-1365283" [ 992.102102] env[62619]: _type = "Task" [ 992.102102] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.110248] env[62619]: DEBUG oslo_vmware.api [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365283, 'name': SuspendVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.238081] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52ac626c-bef0-5ec5-8999-d96e99e2a547, 'name': SearchDatastore_Task, 'duration_secs': 0.026455} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.238419] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.238655] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 992.238867] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 992.340891] env[62619]: INFO nova.compute.manager [-] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Took 1.31 seconds to deallocate network for instance. [ 992.394371] env[62619]: DEBUG oslo_concurrency.lockutils [None req-503d970b-a03b-4f97-a27a-5ea19a8c89ab tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.812s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.412731] env[62619]: DEBUG oslo_concurrency.lockutils [None req-74125cd5-f4a8-4991-851c-d26a8f30e611 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 23.019s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 992.449476] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524365dc-c075-13aa-a359-4144ff142033, 'name': SearchDatastore_Task, 'duration_secs': 0.033791} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 992.450031] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.450317] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103/1c406554-d91d-422a-9a5a-9e910fc51103.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 992.450622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 992.450816] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 992.451481] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-70f54acf-2e72-404e-a9dc-44e8ef29a814 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.454806] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9b5ab97f-ef7b-4707-b6c7-5cdcaad88358 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.463022] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 992.463022] env[62619]: value = "task-1365284" [ 992.463022] env[62619]: _type = "Task" [ 992.463022] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.469723] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 992.470052] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 992.472045] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1a717e8c-8e82-4705-bd1e-04ae85526d09 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.489727] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365284, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.493512] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 992.493512] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e0acd-bd81-ac6c-b33b-11b122d298b2" [ 992.493512] env[62619]: _type = "Task" [ 992.493512] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 992.505154] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e0acd-bd81-ac6c-b33b-11b122d298b2, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.562578] env[62619]: DEBUG oslo_concurrency.lockutils [req-531e98e0-4d5e-4561-94a0-d844245889f5 req-13b35d84-10e5-4d1c-8588-6c992403e5ec service nova] Releasing lock "refresh_cache-f5560f62-634d-42e5-9354-68089db01e33" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 992.613353] env[62619]: DEBUG oslo_vmware.api [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365283, 'name': SuspendVM_Task} progress is 70%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 992.829612] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76317d2c-3f2d-418d-b885-5a643d480977 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.837196] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6ed24c0-1688-4745-bc06-ec7fbfb05eb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.868144] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 992.869270] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdc608cd-6380-4758-aa11-b3f0b08bbda9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.877314] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5beac0ca-06fa-4be1-b5bf-111c1c4c8921 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 992.890924] env[62619]: DEBUG nova.compute.provider_tree [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 992.973693] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365284, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.003203] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]527e0acd-bd81-ac6c-b33b-11b122d298b2, 'name': SearchDatastore_Task, 'duration_secs': 0.014282} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.004593] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c05e07c3-1b4e-48c1-a288-c8e29f090294 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.010202] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 993.010202] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8d391-7c73-6207-faf1-33f264d217e9" [ 993.010202] env[62619]: _type = "Task" [ 993.010202] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.017643] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8d391-7c73-6207-faf1-33f264d217e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.112508] env[62619]: DEBUG oslo_vmware.api [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365283, 'name': SuspendVM_Task, 'duration_secs': 0.594313} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 993.112791] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Suspended the VM {{(pid=62619) suspend /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1166}} [ 993.112998] env[62619]: DEBUG nova.compute.manager [None req-fbd7201d-55d4-41d6-9016-0cb9412e17b6 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 993.113807] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28201492-423b-4f9f-849b-a552e95b4787 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.125932] env[62619]: DEBUG nova.compute.manager [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.125932] env[62619]: DEBUG nova.compute.manager [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing instance network info cache due to event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 993.125932] env[62619]: DEBUG oslo_concurrency.lockutils [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.125932] env[62619]: DEBUG oslo_concurrency.lockutils [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.126238] env[62619]: DEBUG nova.network.neutron [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 993.370139] env[62619]: DEBUG nova.compute.manager [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Received event network-vif-deleted-c2f7c57e-c31f-4cd0-81f8-d3a348d7b3a3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.370363] env[62619]: DEBUG nova.compute.manager [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 993.370587] env[62619]: DEBUG nova.compute.manager [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing instance network info cache due to event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 993.370723] env[62619]: DEBUG oslo_concurrency.lockutils [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 993.370966] env[62619]: DEBUG oslo_concurrency.lockutils [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 993.371113] env[62619]: DEBUG nova.network.neutron [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 993.411940] env[62619]: ERROR nova.scheduler.client.report [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [req-2e3c8bda-c104-4dcf-af15-d23fd9c62e2f] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-2e3c8bda-c104-4dcf-af15-d23fd9c62e2f"}]} [ 993.430719] env[62619]: DEBUG nova.scheduler.client.report [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 993.461828] env[62619]: DEBUG nova.scheduler.client.report [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 993.462050] env[62619]: DEBUG nova.compute.provider_tree [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.473106] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365284, 'name': CopyVirtualDisk_Task} progress is 4%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.478207] env[62619]: DEBUG nova.scheduler.client.report [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 993.497160] env[62619]: DEBUG nova.scheduler.client.report [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 993.519502] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 993.519778] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290542', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'name': 'volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74aa214a-7eda-4613-a394-bc7477d3078e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'serial': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 993.520650] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c0a11bc-9b67-4c88-ae89-5f58d01e072c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.526800] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8d391-7c73-6207-faf1-33f264d217e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.544013] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ebabb034-f1a3-4020-91b2-c7cd8cf945a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.568660] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Reconfiguring VM instance instance-00000059 to attach disk [datastore1] volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e/volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 993.571532] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a2c47921-d34b-4ae8-993b-d781e21a49ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.589858] env[62619]: DEBUG oslo_vmware.api [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 993.589858] env[62619]: value = "task-1365285" [ 993.589858] env[62619]: _type = "Task" [ 993.589858] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 993.597922] env[62619]: DEBUG oslo_vmware.api [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365285, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 993.700155] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-667fd7a9-8024-47be-9eb3-883cc4265b1e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.707759] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-687d8e6c-ff25-42db-abe4-a71dfe5280b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.741934] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4129f8b-4223-4055-b5a8-86919b8f0b84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.750073] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24e0aea2-e61b-43d5-a8a9-7fe3e0220e30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 993.763290] env[62619]: DEBUG nova.compute.provider_tree [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 993.898664] env[62619]: DEBUG nova.network.neutron [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updated VIF entry in instance network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 993.899052] env[62619]: DEBUG nova.network.neutron [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 993.974938] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365284, 'name': CopyVirtualDisk_Task} progress is 4%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.020074] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8d391-7c73-6207-faf1-33f264d217e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.099016] env[62619]: DEBUG oslo_vmware.api [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365285, 'name': ReconfigVM_Task, 'duration_secs': 0.334823} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.099816] env[62619]: DEBUG nova.network.neutron [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updated VIF entry in instance network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 994.100158] env[62619]: DEBUG nova.network.neutron [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 994.101338] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Reconfigured VM instance instance-00000059 to attach disk [datastore1] volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e/volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 994.106419] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e6e2c708-024e-473f-b4a7-54abf3e7fc30 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.125447] env[62619]: DEBUG oslo_vmware.api [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 994.125447] env[62619]: value = "task-1365286" [ 994.125447] env[62619]: _type = "Task" [ 994.125447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.133974] env[62619]: DEBUG oslo_vmware.api [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365286, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.296230] env[62619]: DEBUG nova.scheduler.client.report [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 124 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 994.296672] env[62619]: DEBUG nova.compute.provider_tree [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 124 to 125 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 994.296996] env[62619]: DEBUG nova.compute.provider_tree [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 994.401626] env[62619]: DEBUG oslo_concurrency.lockutils [req-8a0479c3-de12-411b-8029-6036415d50fc req-391e5e6b-69b9-4bd1-96ea-3e1a42b835f9 service nova] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.476212] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365284, 'name': CopyVirtualDisk_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.522263] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52d8d391-7c73-6207-faf1-33f264d217e9, 'name': SearchDatastore_Task, 'duration_secs': 1.372084} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.522263] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.522263] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] f5560f62-634d-42e5-9354-68089db01e33/f5560f62-634d-42e5-9354-68089db01e33.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 994.522625] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f24be563-062d-403f-b8cc-21f2f25e2f69 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.530215] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 994.530215] env[62619]: value = "task-1365287" [ 994.530215] env[62619]: _type = "Task" [ 994.530215] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.538889] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365287, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 994.607860] env[62619]: DEBUG oslo_concurrency.lockutils [req-e810a517-6913-4b88-a901-45fbf90086bc req-c449944d-140d-479a-90d8-1a9347729701 service nova] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 994.635172] env[62619]: DEBUG oslo_vmware.api [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365286, 'name': ReconfigVM_Task, 'duration_secs': 0.139045} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.635491] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290542', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'name': 'volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74aa214a-7eda-4613-a394-bc7477d3078e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'serial': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 994.806274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 3.182s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 994.806884] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 994.809578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 3.312s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 994.811143] env[62619]: INFO nova.compute.claims [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 994.838080] env[62619]: INFO nova.compute.manager [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Resuming [ 994.838733] env[62619]: DEBUG nova.objects.instance [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'flavor' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 994.976085] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365284, 'name': CopyVirtualDisk_Task, 'duration_secs': 2.168899} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 994.976472] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103/1c406554-d91d-422a-9a5a-9e910fc51103.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 994.976698] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 994.976935] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d90148de-f6c1-4a88-ac15-18bfb9daeaf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.984380] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 994.984380] env[62619]: value = "task-1365288" [ 994.984380] env[62619]: _type = "Task" [ 994.984380] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 994.993178] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365288, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.041756] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365287, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.508549} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.042144] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] f5560f62-634d-42e5-9354-68089db01e33/f5560f62-634d-42e5-9354-68089db01e33.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 995.042447] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 995.042760] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-1e054625-4ec7-4c45-b2a2-97c6b7328d7f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.049451] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 995.049451] env[62619]: value = "task-1365289" [ 995.049451] env[62619]: _type = "Task" [ 995.049451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.057326] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365289, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.315669] env[62619]: DEBUG nova.compute.utils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 995.318845] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 995.319018] env[62619]: DEBUG nova.network.neutron [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 995.377398] env[62619]: DEBUG nova.policy [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '630db0e9ce9d4b358b2f46cf1a1c22ae', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '90f89d92151a4d5bacc00e6bdda88aca', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 995.493920] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365288, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.07222} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.494251] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.495040] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-073ea2f0-2a0e-40e4-8eae-7483acd7e2fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.517636] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103/1c406554-d91d-422a-9a5a-9e910fc51103.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.517930] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1df4256a-5659-4c96-8829-7da9e664a54c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.537663] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 995.537663] env[62619]: value = "task-1365290" [ 995.537663] env[62619]: _type = "Task" [ 995.537663] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.547390] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365290, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.558355] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365289, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.057106} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 995.558472] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 995.559183] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-824e654b-f2f4-4808-8c93-b345d3130880 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.580546] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Reconfiguring VM instance instance-00000061 to attach disk [datastore1] f5560f62-634d-42e5-9354-68089db01e33/f5560f62-634d-42e5-9354-68089db01e33.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 995.580753] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-1a340ff6-f3f3-40ff-b42e-8aca44b65827 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.598841] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 995.598841] env[62619]: value = "task-1365291" [ 995.598841] env[62619]: _type = "Task" [ 995.598841] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 995.606177] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365291, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 995.637022] env[62619]: DEBUG nova.network.neutron [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Successfully created port: b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 995.671260] env[62619]: DEBUG nova.objects.instance [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 995.821631] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 995.845566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 995.845758] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquired lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 995.845932] env[62619]: DEBUG nova.network.neutron [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 995.987999] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d0b9af1-9f79-488f-a316-f5217747e626 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.995309] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05b90ce6-b8c8-42e2-a9d9-2dec500112e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.028138] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09eb0100-bf84-43ee-965d-c95d9dd220ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.036038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6aab1627-bb06-43ba-970f-4c2e05a0e2de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.052568] env[62619]: DEBUG nova.compute.provider_tree [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc 
tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.056876] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365290, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.109525] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365291, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.177037] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4d36e558-1542-4b7f-9947-a486843143e7 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.267s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 996.368933] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 996.369292] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 996.369489] env[62619]: DEBUG nova.compute.manager [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 996.370624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aa17ccb-b450-45f3-9af5-9e994666cdbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.377475] env[62619]: DEBUG nova.compute.manager [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Stopping 
instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 996.377890] env[62619]: DEBUG nova.objects.instance [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 996.550262] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365290, 'name': ReconfigVM_Task, 'duration_secs': 0.925621} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.550262] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103/1c406554-d91d-422a-9a5a-9e910fc51103.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.551479] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-c93e4c11-e64b-4f91-a67f-df5ed43b4e1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.557376] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 996.557376] env[62619]: value = "task-1365292" [ 996.557376] env[62619]: _type = "Task" [ 996.557376] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.570067] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365292, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.593275] env[62619]: DEBUG nova.scheduler.client.report [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 125 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 996.593559] env[62619]: DEBUG nova.compute.provider_tree [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 125 to 126 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 996.593746] env[62619]: DEBUG nova.compute.provider_tree [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 996.610693] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365291, 'name': ReconfigVM_Task, 'duration_secs': 0.874785} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 996.611032] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Reconfigured VM instance instance-00000061 to attach disk [datastore1] f5560f62-634d-42e5-9354-68089db01e33/f5560f62-634d-42e5-9354-68089db01e33.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 996.611618] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-7f778fe0-4fc7-406f-82df-83893542dc72 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.619439] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 996.619439] env[62619]: value = "task-1365293" [ 996.619439] env[62619]: _type = "Task" [ 996.619439] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.628030] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365293, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 996.751343] env[62619]: DEBUG nova.network.neutron [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [{"id": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "address": "fa:16:3e:2e:a8:22", "network": {"id": "15e014b4-1e51-427a-9c0f-67abc8db5425", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-2131940789-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "578df9b6434d416fbae5f3cf2c33ef1b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "447ff42d-b33e-4b5d-8b7f-e8117ebbbc92", "external-id": "nsx-vlan-transportzone-836", "segmentation_id": 836, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8c7fa88a-7d", "ovs_interfaceid": "8c7fa88a-7dbd-49cf-b490-e311fa9a804e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 996.832052] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 996.855713] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 996.855969] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 996.856143] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 996.856332] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 996.856487] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 996.856639] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 996.856847] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 996.857017] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 996.857191] 
env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 996.857355] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 996.857526] env[62619]: DEBUG nova.virt.hardware [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 996.858389] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78747385-53d2-49b3-baf9-2595b95d95a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.866091] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0cd9511b-3f5c-4dd0-928a-40736434dc60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.884633] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 996.884868] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-806ba95f-1b4b-4529-bc8d-865772a0fa75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 996.890783] env[62619]: DEBUG oslo_vmware.api [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 996.890783] env[62619]: value = "task-1365294" [ 996.890783] env[62619]: _type = "Task" [ 996.890783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 996.898465] env[62619]: DEBUG oslo_vmware.api [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365294, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.021687] env[62619]: DEBUG nova.compute.manager [req-a95c5f8a-84f2-410a-86b1-62fd631bfe93 req-458e9e70-db8c-44e4-ad86-d5f16abff091 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Received event network-vif-plugged-b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 997.022029] env[62619]: DEBUG oslo_concurrency.lockutils [req-a95c5f8a-84f2-410a-86b1-62fd631bfe93 req-458e9e70-db8c-44e4-ad86-d5f16abff091 service nova] Acquiring lock "48f193f6-9928-4098-8830-dadda0eb11e6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 997.022350] env[62619]: DEBUG oslo_concurrency.lockutils [req-a95c5f8a-84f2-410a-86b1-62fd631bfe93 req-458e9e70-db8c-44e4-ad86-d5f16abff091 service nova] Lock "48f193f6-9928-4098-8830-dadda0eb11e6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.022553] env[62619]: DEBUG oslo_concurrency.lockutils [req-a95c5f8a-84f2-410a-86b1-62fd631bfe93 req-458e9e70-db8c-44e4-ad86-d5f16abff091 service nova] Lock "48f193f6-9928-4098-8830-dadda0eb11e6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.022734] env[62619]: DEBUG nova.compute.manager [req-a95c5f8a-84f2-410a-86b1-62fd631bfe93 req-458e9e70-db8c-44e4-ad86-d5f16abff091 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] No waiting events found dispatching network-vif-plugged-b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 997.022906] env[62619]: WARNING nova.compute.manager [req-a95c5f8a-84f2-410a-86b1-62fd631bfe93 req-458e9e70-db8c-44e4-ad86-d5f16abff091 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Received unexpected event network-vif-plugged-b59999a8-a7e9-4ef1-9f72-7eebd30b174b for instance with vm_state building and task_state spawning. [ 997.067515] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365292, 'name': Rename_Task, 'duration_secs': 0.129703} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.067922] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.068190] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-0b263f60-d2d8-4792-bbd8-c5aa6ef28f34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.075452] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 997.075452] env[62619]: value = "task-1365295" [ 997.075452] env[62619]: _type = "Task" [ 997.075452] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.082810] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365295, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.099276] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.290s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.099844] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 997.105222] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 4.236s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 997.105222] env[62619]: DEBUG nova.objects.instance [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lazy-loading 'resources' on Instance uuid 04e1e5ff-6385-4c3d-a226-355a171f7de0 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 997.110716] env[62619]: DEBUG nova.network.neutron [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Successfully updated port: b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 997.132223] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365293, 'name': Rename_Task, 'duration_secs': 0.129055} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.132520] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 997.132787] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-aa4408a5-28f2-494b-ace1-016f65534cd7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.139501] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 997.139501] env[62619]: value = "task-1365296" [ 997.139501] env[62619]: _type = "Task" [ 997.139501] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.150635] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365296, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.254270] env[62619]: DEBUG oslo_concurrency.lockutils [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Releasing lock "refresh_cache-3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 997.255274] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27329502-768e-421d-b417-bdcdf06d8ce5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.262092] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Resuming the VM {{(pid=62619) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1183}} [ 997.262368] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-104c3df6-2dbb-4bf1-9af6-53a155614bf1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.268034] env[62619]: DEBUG oslo_vmware.api [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 997.268034] env[62619]: value = "task-1365297" [ 997.268034] env[62619]: _type = "Task" [ 997.268034] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 997.277440] env[62619]: DEBUG oslo_vmware.api [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365297, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.400560] env[62619]: DEBUG oslo_vmware.api [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365294, 'name': PowerOffVM_Task, 'duration_secs': 0.279761} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.400817] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 997.401030] env[62619]: DEBUG nova.compute.manager [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 997.401906] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbe8ae55-3f8c-4a96-b677-056eab130234 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.585998] env[62619]: DEBUG oslo_vmware.api [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365295, 'name': PowerOnVM_Task, 'duration_secs': 0.506692} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 997.586397] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 997.586647] env[62619]: INFO nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Took 14.02 seconds to spawn the instance on the hypervisor. [ 997.586844] env[62619]: DEBUG nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 997.587728] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c63e1f6b-328c-4e24-a209-be620c98ada3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.604851] env[62619]: DEBUG nova.compute.utils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 997.606261] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 997.606410] env[62619]: DEBUG nova.network.neutron [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 997.615311] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "refresh_cache-48f193f6-9928-4098-8830-dadda0eb11e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 997.615311] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquired lock "refresh_cache-48f193f6-9928-4098-8830-dadda0eb11e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 997.615311] env[62619]: DEBUG nova.network.neutron [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 997.649236] env[62619]: DEBUG nova.policy [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a1ab4be10d444359a7a3b245ec9b9ea0', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c583f4e7b29743aabd3e96f7c53fa04f', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 997.654274] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365296, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.779963] env[62619]: DEBUG oslo_vmware.api [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365297, 'name': PowerOnVM_Task} progress is 93%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 997.782096] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b169885-cda7-443c-9134-f6f6abf9cb0e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.790274] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06d5c0e7-b015-482e-9f56-1ef43479e86d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.821797] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8fecf777-ca3d-4b34-ab95-d50d4b231dbb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.830169] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-096fa043-32fd-49a5-8ea0-f8ee4bea62ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 997.846030] env[62619]: DEBUG nova.compute.provider_tree [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 997.915840] env[62619]: DEBUG oslo_concurrency.lockutils [None req-cb8b2d07-8bd7-4272-84b4-7e1c4bddf113 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.546s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 997.954384] env[62619]: DEBUG nova.network.neutron [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Successfully created port: b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 998.105039] env[62619]: INFO nova.compute.manager [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Took 25.42 seconds to build instance. [ 998.112186] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 998.152863] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365296, 'name': PowerOnVM_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.177121] env[62619]: DEBUG nova.network.neutron [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 998.278170] env[62619]: DEBUG oslo_vmware.api [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365297, 'name': PowerOnVM_Task, 'duration_secs': 0.550449} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 998.278560] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Resumed the VM {{(pid=62619) resume /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1188}} [ 998.278808] env[62619]: DEBUG nova.compute.manager [None req-54ed89ca-0c35-411a-98eb-edb51c50ccae tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 998.279640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29a85711-1aba-4f6a-9ba6-7ab75696c281 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.349391] env[62619]: DEBUG nova.scheduler.client.report [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 998.409294] env[62619]: DEBUG nova.network.neutron [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Updating instance_info_cache with network_info: [{"id": "b59999a8-a7e9-4ef1-9f72-7eebd30b174b", "address": "fa:16:3e:40:04:7c", "network": {"id": "0799b3d7-1bb5-4a7a-a2eb-aadc151c3e81", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1258328438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90f89d92151a4d5bacc00e6bdda88aca", "mtu": 8950, "physical_network": 
"default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb59999a8-a7", "ovs_interfaceid": "b59999a8-a7e9-4ef1-9f72-7eebd30b174b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 998.607459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ef49620-3b5d-4eec-abd1-578917b87771 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 26.934s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.650959] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365296, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 998.789531] env[62619]: DEBUG nova.objects.instance [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 998.853818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.750s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 998.877160] env[62619]: INFO nova.scheduler.client.report [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocations for instance 04e1e5ff-6385-4c3d-a226-355a171f7de0 [ 998.912802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Releasing lock "refresh_cache-48f193f6-9928-4098-8830-dadda0eb11e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 998.913283] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Instance network_info: |[{"id": "b59999a8-a7e9-4ef1-9f72-7eebd30b174b", "address": "fa:16:3e:40:04:7c", "network": {"id": "0799b3d7-1bb5-4a7a-a2eb-aadc151c3e81", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1258328438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90f89d92151a4d5bacc00e6bdda88aca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb59999a8-a7", "ovs_interfaceid": "b59999a8-a7e9-4ef1-9f72-7eebd30b174b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 998.913940] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:40:04:7c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '1cb94a1a-f287-46e7-b63b-ec692c2141b4', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b59999a8-a7e9-4ef1-9f72-7eebd30b174b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 998.921332] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Creating folder: Project (90f89d92151a4d5bacc00e6bdda88aca). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 998.922346] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6a253ed7-b547-46ed-9871-0d8813172919 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.933834] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Created folder: Project (90f89d92151a4d5bacc00e6bdda88aca) in parent group-v290436. [ 998.934036] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Creating folder: Instances. Parent ref: group-v290544. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 998.934283] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1e23f082-9efc-4f12-944c-0664f4ab8b59 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.943709] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Created folder: Instances in parent group-v290544. 
[ 998.943942] env[62619]: DEBUG oslo.service.loopingcall [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 998.944205] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 998.944535] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5cd67daf-ac25-4a14-8be9-698529ed8ffe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 998.964497] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 998.964497] env[62619]: value = "task-1365300" [ 998.964497] env[62619]: _type = "Task" [ 998.964497] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 998.972118] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365300, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.047848] env[62619]: DEBUG nova.compute.manager [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Received event network-changed-b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 999.048057] env[62619]: DEBUG nova.compute.manager [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Refreshing instance network info cache due to event network-changed-b59999a8-a7e9-4ef1-9f72-7eebd30b174b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 999.048295] env[62619]: DEBUG oslo_concurrency.lockutils [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] Acquiring lock "refresh_cache-48f193f6-9928-4098-8830-dadda0eb11e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.048444] env[62619]: DEBUG oslo_concurrency.lockutils [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] Acquired lock "refresh_cache-48f193f6-9928-4098-8830-dadda0eb11e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.048610] env[62619]: DEBUG nova.network.neutron [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Refreshing network info cache for port b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 999.122867] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 999.148235] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 999.148500] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 999.148674] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 999.148859] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 999.149051] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 999.149163] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 999.149369] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 999.149529] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 999.149694] 
env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 999.149858] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 999.150053] env[62619]: DEBUG nova.virt.hardware [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 999.150836] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f339ad-4ec8-4998-ad92-b344917c2e77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.156386] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365296, 'name': PowerOnVM_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.161816] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92be7c96-f8aa-4e01-a81a-ffbec20dde1d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.260239] env[62619]: DEBUG nova.compute.manager [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Stashing vm_state: active {{(pid=62619) _prep_resize /opt/stack/nova/nova/compute/manager.py:5624}} [ 999.294286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.294581] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.294811] env[62619]: DEBUG nova.network.neutron [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 999.295048] env[62619]: DEBUG nova.objects.instance [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 
tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'info_cache' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 999.385215] env[62619]: DEBUG oslo_concurrency.lockutils [None req-dccac4e2-ea44-46ea-a4f1-20321135ffb9 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "04e1e5ff-6385-4c3d-a226-355a171f7de0" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 9.492s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 999.475736] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365300, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 999.651473] env[62619]: DEBUG oslo_vmware.api [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365296, 'name': PowerOnVM_Task, 'duration_secs': 2.070522} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.653886] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 999.654121] env[62619]: INFO nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Took 11.15 seconds to spawn the instance on the hypervisor. 
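Annotation: the entries around here follow the recurring per-instance "refresh_cache-<uuid>" locking pattern: the lock is acquired, the instance's network info cache is rebuilt, and the lock is released once the cache is updated (the release for 74aa214a shows up further down in the log). Below is a minimal sketch of that acquire/refresh/release shape using plain threading primitives; the in-process lock registry and the fetch_network_info callable are illustrative stand-ins, not oslo_concurrency.lockutils itself, which additionally reports the waited/held times seen in these lines.

import threading
from collections import defaultdict

# Illustrative named-lock registry; the real service uses oslo_concurrency.lockutils.
_locks = defaultdict(threading.Lock)

def refresh_instance_cache(instance_uuid, cache, fetch_network_info):
    lock = _locks[f"refresh_cache-{instance_uuid}"]
    lock.acquire()                      # "Acquiring lock ..." / "Acquired lock ..."
    try:
        # "Building network info cache for instance": fetch_network_info is a
        # hypothetical callable standing in for the Neutron query.
        cache[instance_uuid] = fetch_network_info(instance_uuid)
    finally:
        lock.release()                  # "Releasing lock ..."
    return cache[instance_uuid]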
[ 999.654307] env[62619]: DEBUG nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 999.655086] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae388a3b-12aa-4a87-b591-cbb68b89c04d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.725556] env[62619]: DEBUG nova.network.neutron [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Successfully updated port: b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 999.779907] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 999.780194] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 999.801913] env[62619]: DEBUG nova.objects.base [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Object Instance<74aa214a-7eda-4613-a394-bc7477d3078e> lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 999.829106] env[62619]: DEBUG nova.network.neutron [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Updated VIF entry in instance network info cache for port b59999a8-a7e9-4ef1-9f72-7eebd30b174b. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 999.829478] env[62619]: DEBUG nova.network.neutron [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Updating instance_info_cache with network_info: [{"id": "b59999a8-a7e9-4ef1-9f72-7eebd30b174b", "address": "fa:16:3e:40:04:7c", "network": {"id": "0799b3d7-1bb5-4a7a-a2eb-aadc151c3e81", "bridge": "br-int", "label": "tempest-AttachInterfacesV270Test-1258328438-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "90f89d92151a4d5bacc00e6bdda88aca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "1cb94a1a-f287-46e7-b63b-ec692c2141b4", "external-id": "nsx-vlan-transportzone-346", "segmentation_id": 346, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb59999a8-a7", "ovs_interfaceid": "b59999a8-a7e9-4ef1-9f72-7eebd30b174b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 999.976175] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365300, 'name': CreateVM_Task, 'duration_secs': 0.63555} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 999.976359] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 999.977066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 999.977251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 999.977578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 999.977844] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-26d7a04c-a296-4199-a760-b5f0a11ee5dc {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 999.982422] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 999.982422] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52335443-092d-322d-01e9-99246788b187" [ 999.982422] env[62619]: _type = "Task" [ 999.982422] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 999.990202] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52335443-092d-322d-01e9-99246788b187, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.178085] env[62619]: INFO nova.compute.manager [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Took 22.31 seconds to build instance. [ 1000.235233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "refresh_cache-fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.235233] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "refresh_cache-fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.235233] env[62619]: DEBUG nova.network.neutron [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1000.286963] env[62619]: INFO nova.compute.claims [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1000.332480] env[62619]: DEBUG oslo_concurrency.lockutils [req-1597c03d-0b47-4f83-a72c-35e6d1e4cd4e req-94059f68-8dd1-4cf9-91d3-986b325b9437 service nova] Releasing lock "refresh_cache-48f193f6-9928-4098-8830-dadda0eb11e6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.493510] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52335443-092d-322d-01e9-99246788b187, 'name': SearchDatastore_Task, 'duration_secs': 0.013163} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1000.494436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1000.494436] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1000.494436] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1000.494628] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1000.494743] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1000.495028] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-473996c1-bb57-4db1-87e1-311d618ecef9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.503445] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1000.503654] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1000.504410] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc200080-82e2-49ee-9f73-9d2484809e2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1000.511629] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1000.511629] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525949c4-d0a9-9180-21c0-7347bd48425f" [ 1000.511629] env[62619]: _type = "Task" [ 1000.511629] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1000.520237] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525949c4-d0a9-9180-21c0-7347bd48425f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1000.549166] env[62619]: DEBUG nova.network.neutron [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [{"id": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "address": "fa:16:3e:2b:fd:42", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec268e92-8f", "ovs_interfaceid": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.681672] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4e027174-a938-468c-be5d-1ae8b6a7ce9e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f5560f62-634d-42e5-9354-68089db01e33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 23.820s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1000.766944] env[62619]: DEBUG nova.network.neutron [None 
req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1000.792895] env[62619]: INFO nova.compute.resource_tracker [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating resource usage from migration 4370cfc0-cda7-4112-838d-ba30960bc35a [ 1000.909015] env[62619]: DEBUG nova.network.neutron [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Updating instance_info_cache with network_info: [{"id": "b9b9172c-1cfb-4734-a287-219ce108c9d8", "address": "fa:16:3e:c9:a3:7f", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9b9172c-1c", "ovs_interfaceid": "b9b9172c-1cfb-4734-a287-219ce108c9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1000.975520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1000.975851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.001961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9b79f25-fb20-4139-892f-e15213161d83 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.009452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8a050fcb-ce75-496f-b89a-af026ef2ded8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.022678] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525949c4-d0a9-9180-21c0-7347bd48425f, 'name': SearchDatastore_Task, 'duration_secs': 0.010966} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.048256] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9a678e98-d9af-4aa0-b9a5-d831b976c4b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.051083] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47e56fef-fc8b-41f2-afc9-af31f60c5f4e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.053458] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.058269] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1001.058269] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52425342-7f4f-f4b9-c9b0-749ee0a2c55e" [ 1001.058269] env[62619]: _type = "Task" [ 1001.058269] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.064052] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50bac18a-5b9a-4472-bb21-d964201f684e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.073290] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52425342-7f4f-f4b9-c9b0-749ee0a2c55e, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.083179] env[62619]: DEBUG nova.compute.manager [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Received event network-vif-plugged-b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.083444] env[62619]: DEBUG oslo_concurrency.lockutils [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] Acquiring lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1001.083766] env[62619]: DEBUG oslo_concurrency.lockutils [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1001.084063] env[62619]: DEBUG oslo_concurrency.lockutils [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1001.084359] env[62619]: DEBUG nova.compute.manager [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] No waiting events found dispatching network-vif-plugged-b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1001.084642] env[62619]: WARNING nova.compute.manager [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Received unexpected event network-vif-plugged-b9b9172c-1cfb-4734-a287-219ce108c9d8 for instance with vm_state building and task_state spawning. [ 1001.084895] env[62619]: DEBUG nova.compute.manager [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Received event network-changed-b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1001.085101] env[62619]: DEBUG nova.compute.manager [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Refreshing instance network info cache due to event network-changed-b9b9172c-1cfb-4734-a287-219ce108c9d8. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1001.085279] env[62619]: DEBUG oslo_concurrency.lockutils [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] Acquiring lock "refresh_cache-fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1001.086078] env[62619]: DEBUG nova.compute.provider_tree [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1001.412070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "refresh_cache-fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.412426] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Instance network_info: |[{"id": "b9b9172c-1cfb-4734-a287-219ce108c9d8", "address": "fa:16:3e:c9:a3:7f", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9b9172c-1c", "ovs_interfaceid": "b9b9172c-1cfb-4734-a287-219ce108c9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1001.412747] env[62619]: DEBUG oslo_concurrency.lockutils [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] Acquired lock "refresh_cache-fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1001.412931] env[62619]: DEBUG nova.network.neutron [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Refreshing network info cache for port b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1001.414274] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc 
tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c9:a3:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b9b9172c-1cfb-4734-a287-219ce108c9d8', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1001.421674] env[62619]: DEBUG oslo.service.loopingcall [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1001.422605] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1001.422834] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5b5f5245-2e06-4a81-93be-7fef8e68bcd1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.482224] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1001.504418] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1001.504418] env[62619]: value = "task-1365301" [ 1001.504418] env[62619]: _type = "Task" [ 1001.504418] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.511974] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365301, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.558340] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1001.558641] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-557abaf5-b594-4b31-af5f-2dc7e7f7e427 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.569037] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52425342-7f4f-f4b9-c9b0-749ee0a2c55e, 'name': SearchDatastore_Task, 'duration_secs': 0.019984} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1001.570366] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1001.570711] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 48f193f6-9928-4098-8830-dadda0eb11e6/48f193f6-9928-4098-8830-dadda0eb11e6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1001.571109] env[62619]: DEBUG oslo_vmware.api [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1001.571109] env[62619]: value = "task-1365302" [ 1001.571109] env[62619]: _type = "Task" [ 1001.571109] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.571322] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b5be519a-9c15-4a3e-85da-0946a617daa3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1001.579908] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1001.579908] env[62619]: value = "task-1365303" [ 1001.579908] env[62619]: _type = "Task" [ 1001.579908] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1001.582395] env[62619]: DEBUG oslo_vmware.api [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365302, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1001.590517] env[62619]: DEBUG nova.scheduler.client.report [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1001.593812] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365303, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.004480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1002.016387] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365301, 'name': CreateVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.083161] env[62619]: DEBUG oslo_vmware.api [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365302, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.091103] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365303, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.095745] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.resize_claim" :: held 2.315s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1002.095944] env[62619]: INFO nova.compute.manager [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Migrating [ 1002.103445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.099s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1002.104977] env[62619]: INFO nova.compute.claims [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1002.166289] env[62619]: DEBUG nova.network.neutron [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Updated VIF entry in instance network info cache for port b9b9172c-1cfb-4734-a287-219ce108c9d8. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1002.166638] env[62619]: DEBUG nova.network.neutron [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Updating instance_info_cache with network_info: [{"id": "b9b9172c-1cfb-4734-a287-219ce108c9d8", "address": "fa:16:3e:c9:a3:7f", "network": {"id": "3a7eba7a-d0d6-47a0-833b-04eb88c2f323", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-328251676-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c583f4e7b29743aabd3e96f7c53fa04f", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb9b9172c-1c", "ovs_interfaceid": "b9b9172c-1cfb-4734-a287-219ce108c9d8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1002.515228] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365301, 'name': CreateVM_Task, 'duration_secs': 0.606521} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.515625] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1002.516156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.516336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.516680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1002.516957] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-77e61e76-ead3-45cc-b073-e1f20f1e5e20 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.521974] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1002.521974] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525199a1-93ed-b29c-c4ae-5f277e7f3d60" [ 1002.521974] env[62619]: _type = "Task" [ 1002.521974] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.529864] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525199a1-93ed-b29c-c4ae-5f277e7f3d60, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.582427] env[62619]: DEBUG oslo_vmware.api [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365302, 'name': PowerOnVM_Task, 'duration_secs': 0.562923} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.582685] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1002.582877] env[62619]: DEBUG nova.compute.manager [None req-5c0d43d7-f692-4ae6-87f0-764fbb409dbf tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1002.583718] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2bbedad-a79b-478e-bbe0-31426aa18888 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.596984] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365303, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.512903} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1002.597265] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 48f193f6-9928-4098-8830-dadda0eb11e6/48f193f6-9928-4098-8830-dadda0eb11e6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1002.597453] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1002.597886] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-0946c946-1a9a-4b1e-a418-8619f666907f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1002.607100] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1002.607100] env[62619]: value = "task-1365304" [ 1002.607100] env[62619]: _type = "Task" [ 1002.607100] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1002.616262] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365304, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1002.617160] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1002.617346] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1002.617522] env[62619]: DEBUG nova.network.neutron [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1002.669283] env[62619]: DEBUG oslo_concurrency.lockutils [req-fed89a41-00d2-4d76-b9e2-994d89589ec4 req-b4dd212e-f1b1-4974-b117-d43027e1fe53 service nova] Releasing lock "refresh_cache-fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.032542] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525199a1-93ed-b29c-c4ae-5f277e7f3d60, 'name': SearchDatastore_Task, 'duration_secs': 0.071867} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.033891] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.034143] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1003.034378] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1003.034570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1003.034722] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.037331] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5ccd4c59-96cb-4012-b4b2-2707264d9e35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.054222] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.054467] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1003.055225] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d91e7448-29fb-442a-97c8-9a2545cd6574 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.060399] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1003.060399] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5291c050-67db-24ec-948b-5ebe4b300f8a" [ 1003.060399] env[62619]: _type = "Task" [ 1003.060399] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.068355] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5291c050-67db-24ec-948b-5ebe4b300f8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.117077] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365304, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.224632} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.119883] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1003.122789] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24ac8d7d-bcd8-4007-95d2-4fb4702e27c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.147611] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Reconfiguring VM instance instance-00000062 to attach disk [datastore2] 48f193f6-9928-4098-8830-dadda0eb11e6/48f193f6-9928-4098-8830-dadda0eb11e6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1003.150117] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6c434f72-091e-421d-a57e-081bc56048ba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.170900] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1003.170900] env[62619]: value = "task-1365305" [ 1003.170900] env[62619]: _type = "Task" [ 1003.170900] 
env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.178443] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365305, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.312962] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-00340cb8-daaa-4f04-86dc-c8168082318a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.322486] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11b5dc40-6490-4f57-b201-4ed010ff286d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.355086] env[62619]: DEBUG nova.network.neutron [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance_info_cache with network_info: [{"id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "address": "fa:16:3e:6f:73:55", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d9c045-8c", "ovs_interfaceid": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1003.357227] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7f9a336-1415-4a01-a75b-6910101294e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.366077] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-80e5e06b-cf66-46ec-b46d-df8308a1a70e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.381012] env[62619]: DEBUG nova.compute.provider_tree [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1003.540757] 
env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.541166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.541317] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1003.541472] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1003.541647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1003.544333] env[62619]: INFO nova.compute.manager [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Terminating instance [ 1003.546305] env[62619]: DEBUG nova.compute.manager [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1003.546549] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1003.547371] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3db8d7cd-d86e-4ad2-8878-e2ed778b5d10 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.554749] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1003.554999] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4fe6199e-f2d7-4d4f-a2b3-c9a890e6a966 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.560613] env[62619]: DEBUG oslo_vmware.api [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 1003.560613] env[62619]: value = "task-1365306" [ 1003.560613] env[62619]: _type = "Task" [ 1003.560613] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.571136] env[62619]: DEBUG oslo_vmware.api [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365306, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.574499] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5291c050-67db-24ec-948b-5ebe4b300f8a, 'name': SearchDatastore_Task, 'duration_secs': 0.053963} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.575238] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-62f9eeb7-0b3c-4a29-bd8e-3942295814f3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.579844] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1003.579844] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a0133b-11cd-728d-e31e-2bcebf3e796c" [ 1003.579844] env[62619]: _type = "Task" [ 1003.579844] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.587296] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a0133b-11cd-728d-e31e-2bcebf3e796c, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.680214] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365305, 'name': ReconfigVM_Task, 'duration_secs': 0.397705} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1003.680514] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Reconfigured VM instance instance-00000062 to attach disk [datastore2] 48f193f6-9928-4098-8830-dadda0eb11e6/48f193f6-9928-4098-8830-dadda0eb11e6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1003.681203] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-105570d3-5a6f-4b9b-ab34-f3407a4a184b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.687068] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1003.687068] env[62619]: value = "task-1365307" [ 1003.687068] env[62619]: _type = "Task" [ 1003.687068] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.694235] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365307, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.862051] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1003.884621] env[62619]: DEBUG nova.scheduler.client.report [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1004.071081] env[62619]: DEBUG oslo_vmware.api [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365306, 'name': PowerOffVM_Task, 'duration_secs': 0.322146} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.071259] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1004.071446] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1004.071710] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-53b699b3-3c86-4ca2-88bc-c2c4693d626f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.089459] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52a0133b-11cd-728d-e31e-2bcebf3e796c, 'name': SearchDatastore_Task, 'duration_secs': 0.008954} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.089693] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1004.089942] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542/fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1004.090192] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-5bc66af1-cfab-4a9e-94ef-3b8a32622050 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.096122] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1004.096122] env[62619]: value = "task-1365309" [ 1004.096122] env[62619]: _type = "Task" [ 1004.096122] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.103422] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365309, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.134440] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1004.134717] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1004.134963] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleting the datastore file [datastore1] 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1004.135288] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-20ea9b50-8d66-41fd-8b9a-c01a7fc0af7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.141502] env[62619]: DEBUG oslo_vmware.api [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for the task: (returnval){ [ 1004.141502] env[62619]: value = "task-1365310" [ 1004.141502] env[62619]: _type = "Task" [ 1004.141502] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.149115] env[62619]: DEBUG oslo_vmware.api [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365310, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.198254] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365307, 'name': Rename_Task, 'duration_secs': 0.144289} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.198626] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1004.198914] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-39a3fa3e-ffd5-469f-8abb-d1ed3eb15b5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.205285] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1004.205285] env[62619]: value = "task-1365311" [ 1004.205285] env[62619]: _type = "Task" [ 1004.205285] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.213576] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365311, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.390323] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.287s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1004.391130] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1004.606373] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365309, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493295} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.606752] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542/fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1004.607040] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1004.607364] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-4ce6f9ac-e441-4233-834a-56ccecad9957 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.613447] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1004.613447] env[62619]: value = "task-1365312" [ 1004.613447] env[62619]: _type = "Task" [ 1004.613447] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1004.621875] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365312, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.650600] env[62619]: DEBUG oslo_vmware.api [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Task: {'id': task-1365310, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.193734} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.650889] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.651105] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1004.651295] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1004.651483] env[62619]: INFO nova.compute.manager [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1004.651924] env[62619]: DEBUG oslo.service.loopingcall [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1004.652255] env[62619]: DEBUG nova.compute.manager [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1004.652406] env[62619]: DEBUG nova.network.neutron [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1004.716665] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365311, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.896477] env[62619]: DEBUG nova.compute.utils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1004.899028] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1004.899028] env[62619]: DEBUG nova.network.neutron [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1004.913349] env[62619]: DEBUG nova.compute.manager [req-b9e89b59-1a97-47ac-a61a-5e3407215727 req-6dda6ddd-7619-46a9-8737-727890cb32c2 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Received event network-vif-deleted-8c7fa88a-7dbd-49cf-b490-e311fa9a804e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1004.913566] env[62619]: INFO nova.compute.manager [req-b9e89b59-1a97-47ac-a61a-5e3407215727 req-6dda6ddd-7619-46a9-8737-727890cb32c2 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Neutron deleted interface 8c7fa88a-7dbd-49cf-b490-e311fa9a804e; detaching it from the instance and deleting it from the info cache [ 1004.913742] env[62619]: DEBUG nova.network.neutron [req-b9e89b59-1a97-47ac-a61a-5e3407215727 req-6dda6ddd-7619-46a9-8737-727890cb32c2 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1004.940945] env[62619]: DEBUG nova.policy [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1005.122937] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365312, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062621} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.123365] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1005.124211] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a333b9a7-660f-46e9-96e7-56707018a57e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.151025] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Reconfiguring VM instance instance-00000063 to attach disk [datastore2] fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542/fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1005.151025] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-22c1bc42-257d-4405-a5b8-0adada0e3b55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.173252] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1005.173252] env[62619]: value = "task-1365313" [ 1005.173252] env[62619]: _type = "Task" [ 1005.173252] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.867748] env[62619]: DEBUG nova.network.neutron [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Successfully created port: ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1005.871904] env[62619]: DEBUG nova.network.neutron [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.873068] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1005.883048] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b980ae0-e07e-427b-b76f-cb72dda57dfe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.883048] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a7f6e4-9c22-4835-8a31-f79847f123a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.889405] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365313, 'name': ReconfigVM_Task, 'duration_secs': 0.547069} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.890593] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Reconfigured VM instance instance-00000063 to attach disk [datastore2] fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542/fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1005.891284] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-197ed6fa-73e0-41eb-b52b-d4722b1d0710 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.911145] env[62619]: DEBUG oslo_vmware.api [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365311, 'name': PowerOnVM_Task, 'duration_secs': 1.084245} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1005.911145] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 0 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1005.915931] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1005.915931] env[62619]: INFO nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Took 9.08 seconds to spawn the instance on the hypervisor. 
[ 1005.915931] env[62619]: DEBUG nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1005.918254] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81aa6a5f-7057-4fe0-b2bf-04eccfbd0bb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.930429] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c9b089-6b20-4105-ab9f-49663a85d565 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.933930] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1005.933930] env[62619]: value = "task-1365314" [ 1005.933930] env[62619]: _type = "Task" [ 1005.933930] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1005.952497] env[62619]: DEBUG nova.compute.manager [req-b9e89b59-1a97-47ac-a61a-5e3407215727 req-6dda6ddd-7619-46a9-8737-727890cb32c2 service nova] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Detach interface failed, port_id=8c7fa88a-7dbd-49cf-b490-e311fa9a804e, reason: Instance 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1005.956340] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365314, 'name': Rename_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.376817] env[62619]: INFO nova.compute.manager [-] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Took 1.72 seconds to deallocate network for instance. [ 1006.432091] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1006.432760] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6578c8c-3881-493b-838e-66e533aecc35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.446616] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365314, 'name': Rename_Task, 'duration_secs': 0.463381} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.450085] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1006.450471] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1006.450471] env[62619]: value = "task-1365315" [ 1006.450471] env[62619]: _type = "Task" [ 1006.450471] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.451073] env[62619]: INFO nova.compute.manager [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Took 15.39 seconds to build instance. [ 1006.451857] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4d681a51-186a-4c27-ab67-a29ca3c04a64 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.462083] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365315, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.463782] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1006.463782] env[62619]: value = "task-1365316" [ 1006.463782] env[62619]: _type = "Task" [ 1006.463782] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1006.472191] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365316, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1006.867493] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "interface-48f193f6-9928-4098-8830-dadda0eb11e6-None" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.867697] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "interface-48f193f6-9928-4098-8830-dadda0eb11e6-None" acquired by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.868041] env[62619]: DEBUG nova.objects.instance [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lazy-loading 'flavor' on Instance uuid 48f193f6-9928-4098-8830-dadda0eb11e6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.882970] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1006.887566] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1006.887716] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1006.887940] env[62619]: DEBUG nova.objects.instance [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lazy-loading 'resources' on Instance uuid 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1006.912455] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta 
ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.912739] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.912899] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.913107] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.913314] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.913479] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.913689] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.913850] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.914031] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.914195] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.914394] env[62619]: DEBUG nova.virt.hardware [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 
tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.915735] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c661065e-aec6-46e7-ac8f-b26d2e4d389a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.923851] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11cd4421-6621-4224-aa3e-5a518ed27491 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.954937] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65e97cbc-0d77-4f86-85f3-197ba8fbb0ab tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "48f193f6-9928-4098-8830-dadda0eb11e6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.904s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1006.963904] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365315, 'name': PowerOffVM_Task, 'duration_secs': 0.183285} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1006.964306] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1006.964587] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 17 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1006.977868] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365316, 'name': PowerOnVM_Task} progress is 100%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.281505] env[62619]: DEBUG nova.compute.manager [req-2841274e-4456-43ed-9f2b-41e05c3e0c4d req-b91a6f6f-0f93-4f8d-a397-c5c7a401bcc1 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Received event network-vif-plugged-ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1007.281731] env[62619]: DEBUG oslo_concurrency.lockutils [req-2841274e-4456-43ed-9f2b-41e05c3e0c4d req-b91a6f6f-0f93-4f8d-a397-c5c7a401bcc1 service nova] Acquiring lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1007.281941] env[62619]: DEBUG oslo_concurrency.lockutils [req-2841274e-4456-43ed-9f2b-41e05c3e0c4d req-b91a6f6f-0f93-4f8d-a397-c5c7a401bcc1 service nova] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1007.282135] env[62619]: DEBUG oslo_concurrency.lockutils [req-2841274e-4456-43ed-9f2b-41e05c3e0c4d req-b91a6f6f-0f93-4f8d-a397-c5c7a401bcc1 service nova] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1007.282310] env[62619]: DEBUG nova.compute.manager [req-2841274e-4456-43ed-9f2b-41e05c3e0c4d req-b91a6f6f-0f93-4f8d-a397-c5c7a401bcc1 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] No waiting events found dispatching network-vif-plugged-ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1007.282480] env[62619]: WARNING nova.compute.manager [req-2841274e-4456-43ed-9f2b-41e05c3e0c4d req-b91a6f6f-0f93-4f8d-a397-c5c7a401bcc1 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Received unexpected event network-vif-plugged-ee20914c-0d83-4518-b2c4-83fdd669116b for instance with vm_state building and task_state spawning. 
[ 1007.358826] env[62619]: DEBUG nova.network.neutron [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Successfully updated port: ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1007.371558] env[62619]: DEBUG nova.objects.instance [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lazy-loading 'pci_requests' on Instance uuid 48f193f6-9928-4098-8830-dadda0eb11e6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1007.478533] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:50Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='84',id=12,is_public=True,memory_mb=256,name='m1.micro',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1007.478806] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1007.478967] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1007.479176] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1007.479327] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1007.479475] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1007.479675] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum 
VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1007.479831] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1007.479993] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1007.480242] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1007.480326] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1007.485231] env[62619]: DEBUG oslo_vmware.api [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365316, 'name': PowerOnVM_Task, 'duration_secs': 0.549331} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1007.487585] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ea90ea45-d12f-4598-9b1c-7c754bb6d3d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.497210] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1007.497422] env[62619]: INFO nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Took 8.37 seconds to spawn the instance on the hypervisor. 
[ 1007.497665] env[62619]: DEBUG nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1007.499031] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8995154c-7ee1-404c-bc84-f3ea703d40df {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.511473] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1007.511473] env[62619]: value = "task-1365317" [ 1007.511473] env[62619]: _type = "Task" [ 1007.511473] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.519704] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365317, 'name': ReconfigVM_Task} progress is 10%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1007.579254] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92fc5369-4262-4c66-b6dd-fa6eb432c800 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.587509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08ea5c9b-09ad-4b00-ab57-cdc6aef537ad {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.617846] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f51c9f78-38ad-4bd2-a519-db6023079c4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.625160] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f52f7c0b-bcb5-4c87-8760-427be4bedec6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.638579] env[62619]: DEBUG nova.compute.provider_tree [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1007.862027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-bff6a2d3-3d0c-42df-8129-f78e321d1b7e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1007.862027] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-bff6a2d3-3d0c-42df-8129-f78e321d1b7e" 
{{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1007.862189] env[62619]: DEBUG nova.network.neutron [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.874379] env[62619]: DEBUG nova.objects.base [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Object Instance<48f193f6-9928-4098-8830-dadda0eb11e6> lazy-loaded attributes: flavor,pci_requests {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1007.874695] env[62619]: DEBUG nova.network.neutron [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1007.964086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ed958fa5-f5fd-4e9b-aa04-f832eebd188c tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "interface-48f193f6-9928-4098-8830-dadda0eb11e6-None" "released" by "nova.compute.manager.ComputeManager.attach_interface..do_attach_interface" :: held 1.096s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.023336] env[62619]: INFO nova.compute.manager [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Took 16.54 seconds to build instance. [ 1008.028081] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365317, 'name': ReconfigVM_Task, 'duration_secs': 0.222946} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.028287] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 33 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1008.141669] env[62619]: DEBUG nova.scheduler.client.report [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1008.391381] env[62619]: DEBUG nova.network.neutron [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1008.522890] env[62619]: DEBUG nova.network.neutron [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Updating instance_info_cache with network_info: [{"id": "ee20914c-0d83-4518-b2c4-83fdd669116b", "address": "fa:16:3e:19:a8:5e", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee20914c-0d", "ovs_interfaceid": "ee20914c-0d83-4518-b2c4-83fdd669116b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1008.525925] env[62619]: DEBUG oslo_concurrency.lockutils [None req-f2b95c87-b835-4ca2-a997-76e484aa1bdc tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 
18.058s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.533988] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format='bare',created_at=,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=1,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=,status=,tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1008.534242] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1008.534403] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1008.534588] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1008.534737] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1008.534886] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1008.535128] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1008.535294] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1008.535462] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Got 1 
possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1008.535657] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1008.535842] env[62619]: DEBUG nova.virt.hardware [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1008.541716] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Reconfiguring VM instance instance-00000060 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1008.542531] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-54d19c6c-0fd4-4a14-b4b4-16c9e4612833 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.564019] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1008.564019] env[62619]: value = "task-1365318" [ 1008.564019] env[62619]: _type = "Task" [ 1008.564019] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.572797] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365318, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.647431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.760s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.671149] env[62619]: INFO nova.scheduler.client.report [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Deleted allocations for instance 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a [ 1008.956106] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.956541] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.956639] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1008.956874] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1008.957038] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1008.960100] env[62619]: INFO nova.compute.manager [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Terminating instance [ 1008.962037] env[62619]: DEBUG nova.compute.manager [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 
tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1008.962237] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1008.963078] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e8d629-1414-4e19-bd74-b507ecce5be2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.972885] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1008.973135] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-0675e20f-c3f9-4858-9417-b6d4c6a378fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.980899] env[62619]: DEBUG oslo_vmware.api [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1008.980899] env[62619]: value = "task-1365319" [ 1008.980899] env[62619]: _type = "Task" [ 1008.980899] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.988640] env[62619]: DEBUG oslo_vmware.api [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365319, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.025689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-bff6a2d3-3d0c-42df-8129-f78e321d1b7e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1009.026043] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Instance network_info: |[{"id": "ee20914c-0d83-4518-b2c4-83fdd669116b", "address": "fa:16:3e:19:a8:5e", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee20914c-0d", "ovs_interfaceid": "ee20914c-0d83-4518-b2c4-83fdd669116b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1009.026499] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:19:a8:5e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'ee20914c-0d83-4518-b2c4-83fdd669116b', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1009.033842] env[62619]: DEBUG oslo.service.loopingcall [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1009.034082] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1009.034325] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70b6e97b-a155-46d0-aa0a-c12332af100c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.053738] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1009.053738] env[62619]: value = "task-1365320" [ 1009.053738] env[62619]: _type = "Task" [ 1009.053738] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.061619] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365320, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.073243] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365318, 'name': ReconfigVM_Task, 'duration_secs': 0.392666} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.073610] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Reconfigured VM instance instance-00000060 to detach disk 2000 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1009.074419] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f93bd530-27fa-4376-9b3f-aa81e8109154 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.097062] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Reconfiguring VM instance instance-00000060 to attach disk [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103/1c406554-d91d-422a-9a5a-9e910fc51103.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1009.097379] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bd227a22-c35a-422f-ac5e-5caace2c7339 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.116803] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1009.116803] env[62619]: value = "task-1365321" [ 1009.116803] env[62619]: _type = "Task" [ 1009.116803] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.125384] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365321, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.178304] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a99811b4-0c16-4dfe-9875-f3c53927abd3 tempest-ServersNegativeTestJSON-828238807 tempest-ServersNegativeTestJSON-828238807-project-member] Lock "3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.637s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.308237] env[62619]: DEBUG nova.compute.manager [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Received event network-changed-ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1009.308411] env[62619]: DEBUG nova.compute.manager [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Refreshing instance network info cache due to event network-changed-ee20914c-0d83-4518-b2c4-83fdd669116b. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1009.308630] env[62619]: DEBUG oslo_concurrency.lockutils [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] Acquiring lock "refresh_cache-bff6a2d3-3d0c-42df-8129-f78e321d1b7e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.308776] env[62619]: DEBUG oslo_concurrency.lockutils [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] Acquired lock "refresh_cache-bff6a2d3-3d0c-42df-8129-f78e321d1b7e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.308942] env[62619]: DEBUG nova.network.neutron [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Refreshing network info cache for port ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1009.491282] env[62619]: DEBUG oslo_vmware.api [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365319, 'name': PowerOffVM_Task, 'duration_secs': 0.227654} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.491582] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1009.491802] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1009.492075] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-bb249570-c4c8-4591-bd9b-66a011066437 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.564323] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365320, 'name': CreateVM_Task, 'duration_secs': 0.30854} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.564555] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1009.565272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1009.565611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1009.565779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1009.566051] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6643336-e681-4a6a-8826-afb34a9f71a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.570726] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1009.570726] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5247b405-86d9-b574-bd10-69061e675708" [ 1009.570726] env[62619]: _type = "Task" [ 1009.570726] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.578034] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5247b405-86d9-b574-bd10-69061e675708, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1009.626356] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365321, 'name': ReconfigVM_Task, 'duration_secs': 0.300897} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1009.626600] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Reconfigured VM instance instance-00000060 to attach disk [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103/1c406554-d91d-422a-9a5a-9e910fc51103.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1009.626864] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 50 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1009.939214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "48f193f6-9928-4098-8830-dadda0eb11e6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.939470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "48f193f6-9928-4098-8830-dadda0eb11e6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.939678] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "48f193f6-9928-4098-8830-dadda0eb11e6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1009.939921] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "48f193f6-9928-4098-8830-dadda0eb11e6-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1009.940139] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "48f193f6-9928-4098-8830-dadda0eb11e6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1009.942294] env[62619]: INFO nova.compute.manager [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Terminating instance [ 1009.944105] env[62619]: DEBUG nova.compute.manager [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1009.944311] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1009.945141] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8909900a-ccef-4d58-91ee-3448d899c0d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.952532] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1009.952759] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-56b00644-6336-4843-a225-d74799f25f34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1009.958912] env[62619]: DEBUG oslo_vmware.api [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1009.958912] env[62619]: value = "task-1365323" [ 1009.958912] env[62619]: _type = "Task" [ 1009.958912] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1009.967098] env[62619]: DEBUG oslo_vmware.api [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365323, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.082182] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5247b405-86d9-b574-bd10-69061e675708, 'name': SearchDatastore_Task, 'duration_secs': 0.009603} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.082488] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.082749] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1010.082988] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1010.083149] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1010.083364] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1010.083777] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d70a90ce-d144-429b-8844-e21a88538c50 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.091371] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1010.091541] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1010.092252] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4b21d1d9-3c7d-46a6-a059-8b150be0181e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.097091] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1010.097091] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5243032a-6a10-77d4-eb6f-42d539d2d9a5" [ 1010.097091] env[62619]: _type = "Task" [ 1010.097091] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.104615] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5243032a-6a10-77d4-eb6f-42d539d2d9a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.131564] env[62619]: DEBUG nova.network.neutron [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Updated VIF entry in instance network info cache for port ee20914c-0d83-4518-b2c4-83fdd669116b. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.131564] env[62619]: DEBUG nova.network.neutron [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Updating instance_info_cache with network_info: [{"id": "ee20914c-0d83-4518-b2c4-83fdd669116b", "address": "fa:16:3e:19:a8:5e", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapee20914c-0d", "ovs_interfaceid": "ee20914c-0d83-4518-b2c4-83fdd669116b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.134855] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7874e58c-16be-4c21-adf6-35b4e912f5f7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.153765] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee20c45f-8bbb-4d29-b47a-f7de223d35fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.171989] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 67 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1010.469967] env[62619]: DEBUG oslo_vmware.api [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365323, 'name': PowerOffVM_Task, 'duration_secs': 0.20712} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.473541] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1010.474179] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1010.474411] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-957a7b8c-118c-4688-b67f-35a121dd96e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.608038] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5243032a-6a10-77d4-eb6f-42d539d2d9a5, 'name': SearchDatastore_Task, 'duration_secs': 0.010183} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1010.609742] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-22ffc548-ae32-4d13-9873-59954cebeb3d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1010.615061] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1010.615061] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524870c3-cff0-6c4c-4ff8-0884e4eeb25d" [ 1010.615061] env[62619]: _type = "Task" [ 1010.615061] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1010.623212] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524870c3-cff0-6c4c-4ff8-0884e4eeb25d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1010.635174] env[62619]: DEBUG oslo_concurrency.lockutils [req-de8ca092-59ec-4bee-b7a1-d901f66dff20 req-1915179e-7a36-466d-b16d-8a2533300434 service nova] Releasing lock "refresh_cache-bff6a2d3-3d0c-42df-8129-f78e321d1b7e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1010.732999] env[62619]: DEBUG nova.network.neutron [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Port b2d9c045-8c75-4e70-a8de-8f4587d036a1 binding to destination host cpu-1 is already ACTIVE {{(pid=62619) migrate_instance_start /opt/stack/nova/nova/network/neutron.py:3171}} [ 1011.124368] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]524870c3-cff0-6c4c-4ff8-0884e4eeb25d, 'name': SearchDatastore_Task, 'duration_secs': 0.008622} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.124892] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1011.125147] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] bff6a2d3-3d0c-42df-8129-f78e321d1b7e/bff6a2d3-3d0c-42df-8129-f78e321d1b7e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1011.125457] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-29961253-162b-4c7b-afbb-8600b1c4161b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.131896] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1011.131896] env[62619]: value = "task-1365325" [ 1011.131896] env[62619]: _type = "Task" [ 1011.131896] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.139972] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365325, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.302922] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.303203] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.303390] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleting the datastore file [datastore2] fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.303625] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e8d15d03-0df5-40d5-b9b1-d8d5e092e91d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.310991] env[62619]: DEBUG oslo_vmware.api [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for the task: (returnval){ [ 1011.310991] env[62619]: value = "task-1365326" [ 1011.310991] env[62619]: _type = "Task" [ 1011.310991] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.315319] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1011.315560] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1011.315935] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Deleting the datastore file [datastore2] 48f193f6-9928-4098-8830-dadda0eb11e6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1011.316422] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-d455b979-86d7-46e4-8458-6dfb271d0bce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.321866] env[62619]: DEBUG oslo_vmware.api [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365326, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.326929] env[62619]: DEBUG oslo_vmware.api [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for the task: (returnval){ [ 1011.326929] env[62619]: value = "task-1365327" [ 1011.326929] env[62619]: _type = "Task" [ 1011.326929] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.336397] env[62619]: DEBUG oslo_vmware.api [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365327, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.641359] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365325, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.459397} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.641692] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] bff6a2d3-3d0c-42df-8129-f78e321d1b7e/bff6a2d3-3d0c-42df-8129-f78e321d1b7e.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1011.641946] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1011.642279] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-d4bc6798-446a-4fea-96dd-8b7710a2bf0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1011.648916] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1011.648916] env[62619]: value = "task-1365328" [ 1011.648916] env[62619]: _type = "Task" [ 1011.648916] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1011.658096] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365328, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1011.759882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1011.760156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1011.760336] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1011.820900] env[62619]: DEBUG oslo_vmware.api [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Task: {'id': task-1365326, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.345164} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.821172] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.821361] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.821538] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.821709] env[62619]: INFO nova.compute.manager [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Took 2.86 seconds to destroy the instance on the hypervisor. 
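The records above keep repeating one pattern: a vSphere method such as CopyVirtualDisk_Task, ReconfigVM_Task or DeleteDatastoreFile_Task is invoked, a task handle comes back ("Waiting for the task: (returnval){ value = "task-..." }"), and oslo_vmware's wait_for_task/_poll_task loop reports progress percentages until the task "completed successfully" with a duration_secs. The sketch below is a minimal, self-contained re-implementation of that poll loop for illustration only; the poll_progress callback, the TaskFailed exception and the 0.5 s interval are assumptions of this sketch, not the actual oslo_vmware.api code.

```python
import time


class TaskFailed(Exception):
    """Raised when the polled task reports an error state (illustrative only)."""


def wait_for_task(poll_progress, interval=0.5, timeout=300):
    """Poll a task until it finishes, mimicking the log's wait_for_task loop.

    poll_progress() is assumed to return a dict like
    {'state': 'running'|'success'|'error', 'progress': int, 'error': str}.
    """
    start = time.monotonic()
    while True:
        info = poll_progress()
        if info['state'] == 'success':
            # Corresponds to the "completed successfully" records with duration_secs.
            return {'duration_secs': round(time.monotonic() - start, 6)}
        if info['state'] == 'error':
            raise TaskFailed(info.get('error', 'unknown error'))
        # Corresponds to the "Task: {...} progress is N%" debug records.
        print("progress is %d%%" % info.get('progress', 0))
        if time.monotonic() - start > timeout:
            raise TaskFailed('timed out waiting for task')
        time.sleep(interval)
```

In the run above the real loop lives in oslo_vmware/api.py, which is why every progress and completion record is attributed to _poll_task at lines 434 and 444 of that module.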
[ 1011.821945] env[62619]: DEBUG oslo.service.loopingcall [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.822149] env[62619]: DEBUG nova.compute.manager [-] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1011.822246] env[62619]: DEBUG nova.network.neutron [-] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1011.834896] env[62619]: DEBUG oslo_vmware.api [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Task: {'id': task-1365327, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.425082} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1011.835097] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1011.835285] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1011.835464] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1011.835638] env[62619]: INFO nova.compute.manager [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Took 1.89 seconds to destroy the instance on the hypervisor. [ 1011.835873] env[62619]: DEBUG oslo.service.loopingcall [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1011.836074] env[62619]: DEBUG nova.compute.manager [-] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1011.836166] env[62619]: DEBUG nova.network.neutron [-] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1012.158283] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365328, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.059673} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.158621] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1012.159343] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4835b569-4627-42a3-bc11-bef46083f65d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.181625] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Reconfiguring VM instance instance-00000064 to attach disk [datastore1] bff6a2d3-3d0c-42df-8129-f78e321d1b7e/bff6a2d3-3d0c-42df-8129-f78e321d1b7e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1012.182120] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-be4ef244-beeb-4bfe-8e6b-a8fea7b4d928 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.203504] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1012.203504] env[62619]: value = "task-1365329" [ 1012.203504] env[62619]: _type = "Task" [ 1012.203504] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.216433] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365329, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.415598] env[62619]: DEBUG nova.compute.manager [req-7910f920-a996-44e9-ac55-f5dd55b7376e req-aedeae3b-3d22-41aa-a684-f180032b4ec4 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Received event network-vif-deleted-b59999a8-a7e9-4ef1-9f72-7eebd30b174b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1012.415598] env[62619]: INFO nova.compute.manager [req-7910f920-a996-44e9-ac55-f5dd55b7376e req-aedeae3b-3d22-41aa-a684-f180032b4ec4 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Neutron deleted interface b59999a8-a7e9-4ef1-9f72-7eebd30b174b; detaching it from the instance and deleting it from the info cache [ 1012.415598] env[62619]: DEBUG nova.network.neutron [req-7910f920-a996-44e9-ac55-f5dd55b7376e req-aedeae3b-3d22-41aa-a684-f180032b4ec4 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.713479] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365329, 'name': ReconfigVM_Task, 'duration_secs': 0.321266} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1012.713863] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Reconfigured VM instance instance-00000064 to attach disk [datastore1] bff6a2d3-3d0c-42df-8129-f78e321d1b7e/bff6a2d3-3d0c-42df-8129-f78e321d1b7e.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1012.714560] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-771d8a97-f937-4340-b93f-506f130d1fa1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.722959] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1012.722959] env[62619]: value = "task-1365330" [ 1012.722959] env[62619]: _type = "Task" [ 1012.722959] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1012.731130] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365330, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1012.796785] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1012.796983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1012.797191] env[62619]: DEBUG nova.network.neutron [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1012.889645] env[62619]: DEBUG nova.network.neutron [-] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.893317] env[62619]: DEBUG nova.network.neutron [-] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1012.917599] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7f256097-5039-4279-8540-dafb93778d7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.926797] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b7fe3c-0a36-4d0a-a93b-f43aa53dc1b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1012.957195] env[62619]: DEBUG nova.compute.manager [req-7910f920-a996-44e9-ac55-f5dd55b7376e req-aedeae3b-3d22-41aa-a684-f180032b4ec4 service nova] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Detach interface failed, port_id=b59999a8-a7e9-4ef1-9f72-7eebd30b174b, reason: Instance 48f193f6-9928-4098-8830-dadda0eb11e6 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1013.234144] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365330, 'name': Rename_Task, 'duration_secs': 0.144864} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.234484] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1013.234701] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5a531290-f44c-4f79-9a1f-ea28e4643a23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.241511] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1013.241511] env[62619]: value = "task-1365331" [ 1013.241511] env[62619]: _type = "Task" [ 1013.241511] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1013.249779] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365331, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1013.395966] env[62619]: INFO nova.compute.manager [-] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Took 1.56 seconds to deallocate network for instance. [ 1013.395966] env[62619]: INFO nova.compute.manager [-] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Took 1.57 seconds to deallocate network for instance. 
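Both "Deallocating network for instance" records are preceded by an oslo.service record, "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return", i.e. a looping call that re-invokes a function until it signals completion. Below is a hedged sketch of that mechanism using oslo.service's FixedIntervalLoopingCall; the retry target, the attempt counter and the 1-second interval are assumptions of this sketch (not Nova's actual values), and it assumes oslo.service is installed and usable from a plain script.

```python
from oslo_service import loopingcall


def deallocate_with_retries():
    """Illustrative stand-in for a network-deallocation call that may need retries."""
    attempts = {'n': 0}

    def _try_once():
        attempts['n'] += 1
        # Pretend the first attempt fails transiently; returning normally means
        # "run me again at the next interval".
        if attempts['n'] < 2:
            return
        # Raising LoopingCallDone stops the loop and unblocks wait().
        raise loopingcall.LoopingCallDone(retvalue=True)

    timer = loopingcall.FixedIntervalLoopingCall(_try_once)
    # start() returns an event; wait() blocks until LoopingCallDone is raised,
    # which is the "Waiting for function ... to return" seen in the log.
    return timer.start(interval=1).wait()


if __name__ == '__main__':
    print(deallocate_with_retries())
```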
[ 1013.541717] env[62619]: DEBUG nova.network.neutron [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance_info_cache with network_info: [{"id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "address": "fa:16:3e:6f:73:55", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d9c045-8c", "ovs_interfaceid": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1013.751755] env[62619]: DEBUG oslo_vmware.api [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365331, 'name': PowerOnVM_Task, 'duration_secs': 0.435212} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1013.752061] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1013.752306] env[62619]: INFO nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Took 6.87 seconds to spawn the instance on the hypervisor. 
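The instance_info_cache updates above (and the earlier "Instance network_info" record) all carry the same structure: a list of VIF dicts, each with a port id, a MAC address and a nested network whose subnets hold the fixed IPs. The snippet below walks that structure to pull out port ID, MAC and fixed IPs; the sample data is abbreviated from the record above, and the helper name is an illustrative choice, not a Nova function.

```python
# Abbreviated from the "Updating instance_info_cache with network_info" record above.
network_info = [{
    "id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1",
    "address": "fa:16:3e:6f:73:55",
    "network": {
        "id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9",
        "bridge": "br-int",
        "subnets": [{
            "cidr": "192.168.128.0/28",
            "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4}],
        }],
    },
    "type": "ovs",
    "devname": "tapb2d9c045-8c",
}]


def summarize_vifs(network_info):
    """Return (port_id, mac, [fixed ips]) tuples from a network_info list."""
    out = []
    for vif in network_info:
        ips = [ip["address"]
               for subnet in vif.get("network", {}).get("subnets", [])
               for ip in subnet.get("ips", [])
               if ip.get("type") == "fixed"]
        out.append((vif["id"], vif["address"], ips))
    return out


print(summarize_vifs(network_info))
# [('b2d9c045-8c75-4e70-a8de-8f4587d036a1', 'fa:16:3e:6f:73:55', ['192.168.128.11'])]
```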
[ 1013.752508] env[62619]: DEBUG nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1013.753436] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81faefee-7f1e-4c26-a6f2-3c7c8f93d785 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1013.908138] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1013.908518] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1013.908827] env[62619]: DEBUG nova.objects.instance [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lazy-loading 'resources' on Instance uuid fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1013.910529] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1014.045544] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1014.271227] env[62619]: INFO nova.compute.manager [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Took 12.28 seconds to build instance. 
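The "Acquiring lock ... / Lock ... acquired ... waited 0.001s / Lock ... released ... held 1.759s" triplets around "compute_resources" come from oslo.concurrency's lockutils, which the resource tracker uses to serialize usage updates on a compute host. A minimal sketch of the same pattern with lockutils.lock follows; the lock name is taken from the log, but the work inside the block is a placeholder, not Nova's resource-tracker code.

```python
import time

from oslo_concurrency import lockutils


def update_usage():
    """Toy critical section guarded the way the resource tracker guards its updates."""
    # lockutils.lock() is a context manager; by default it is an in-process
    # semaphore-based lock, like the "compute_resources" lock in the log.
    with lockutils.lock("compute_resources"):
        time.sleep(0.01)  # stand-in for recalculating resource usage
        return "usage updated"


print(update_usage())
```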
[ 1014.449211] env[62619]: DEBUG nova.compute.manager [req-0a3d173b-9291-4ff3-ae1a-d7f6714cbcb6 req-ee5608d1-592f-4c6e-a02d-e4eb414b79f3 service nova] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Received event network-vif-deleted-b9b9172c-1cfb-4734-a287-219ce108c9d8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1014.567326] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aa46d81-c0f8-49f0-b3ad-07d6487059c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.589094] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-305d9129-62a0-4132-8d2b-0a52b5d623b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.592286] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be4118f6-c32b-4c1f-b1ad-c8e4b15d84aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.601503] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-027cea6f-efdf-410a-9a79-6fcac5796149 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.604615] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 83 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1014.637092] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92128e4d-852a-42e8-9c93-ed7b081c6fa4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.644650] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea409977-52f9-48e7-a892-b7476590ebee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1014.657830] env[62619]: DEBUG nova.compute.provider_tree [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1014.776167] env[62619]: DEBUG oslo_concurrency.lockutils [None req-7703e0df-8997-446d-a321-8f42d8ed4c36 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.800s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.111881] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powering on the VM {{(pid=62619) 
power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1015.112245] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-4fa2a1a7-8c79-4a81-8124-fa9f51c4751d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.119217] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1015.119217] env[62619]: value = "task-1365332" [ 1015.119217] env[62619]: _type = "Task" [ 1015.119217] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.127622] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365332, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.161024] env[62619]: DEBUG nova.scheduler.client.report [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1015.629466] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365332, 'name': PowerOnVM_Task} progress is 66%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1015.667274] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.759s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.669956] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.759s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.670302] env[62619]: DEBUG nova.objects.instance [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lazy-loading 'resources' on Instance uuid 48f193f6-9928-4098-8830-dadda0eb11e6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1015.671466] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.671711] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.671929] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1015.672143] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1015.672338] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1015.675140] env[62619]: INFO nova.compute.manager [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Terminating instance [ 1015.676885] env[62619]: DEBUG nova.compute.manager [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1015.677116] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1015.678039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e4e2e33-ef22-42c4-a3c6-9fe16e9814cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.686767] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1015.687802] env[62619]: INFO nova.scheduler.client.report [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Deleted allocations for instance fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542 [ 1015.688566] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-d54f6e80-9cff-4ed2-9a70-c899d96c8130 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1015.700415] env[62619]: DEBUG oslo_vmware.api [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1015.700415] env[62619]: value = "task-1365333" [ 1015.700415] env[62619]: _type = "Task" [ 1015.700415] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1015.708985] env[62619]: DEBUG oslo_vmware.api [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365333, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.132761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1016.133050] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1016.134278] env[62619]: DEBUG oslo_vmware.api [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365332, 'name': PowerOnVM_Task, 'duration_secs': 0.740739} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.134730] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1016.134954] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9136b3f1-26d0-45ea-8f8e-a47e30679ed2 tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance '1c406554-d91d-422a-9a5a-9e910fc51103' progress to 100 {{(pid=62619) _update_instance_progress /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1352}} [ 1016.198090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b6efa61c-12e2-4a60-9c1f-ad3ddcee1075 tempest-ServerDiskConfigTestJSON-1756074416 tempest-ServerDiskConfigTestJSON-1756074416-project-member] Lock "fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 7.242s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1016.215976] env[62619]: DEBUG oslo_vmware.api [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365333, 'name': PowerOffVM_Task, 'duration_secs': 0.234897} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.216513] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1016.216690] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1016.216952] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-75ee7bde-e2de-436f-b6ae-42618c7680cb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.295445] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1016.295982] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1016.295982] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore1] bff6a2d3-3d0c-42df-8129-f78e321d1b7e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1016.296218] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3db3dcd5-9f0a-4030-a5b5-dc894fd8b798 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.303166] env[62619]: DEBUG oslo_vmware.api [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1016.303166] env[62619]: value = "task-1365335" [ 1016.303166] env[62619]: _type = "Task" [ 1016.303166] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1016.313204] env[62619]: DEBUG oslo_vmware.api [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365335, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1016.337367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc1b1fad-2701-46aa-b7f1-26103b1d9b3b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.344786] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56b62959-85eb-4472-af6d-8c82468aa8ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.376370] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-904620c0-efbb-42d6-a2e6-83ea545940ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.383465] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-711c1231-8118-4cdc-830d-2846c7b4366b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1016.397542] env[62619]: DEBUG nova.compute.provider_tree [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1016.636026] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1016.815200] env[62619]: DEBUG oslo_vmware.api [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365335, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.293775} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1016.815582] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1016.815582] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1016.815795] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1016.815922] env[62619]: INFO nova.compute.manager [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Took 1.14 seconds to destroy the instance on the hypervisor. [ 1016.816184] env[62619]: DEBUG oslo.service.loopingcall [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1016.816382] env[62619]: DEBUG nova.compute.manager [-] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1016.816476] env[62619]: DEBUG nova.network.neutron [-] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1016.900329] env[62619]: DEBUG nova.scheduler.client.report [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1017.166178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1017.251973] env[62619]: DEBUG nova.compute.manager [req-3e1d16c3-0ca9-40bd-bbb8-5987cc33155b 
req-e2b35f34-b676-4ac6-b5e4-e389f6741eea service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Received event network-vif-deleted-ee20914c-0d83-4518-b2c4-83fdd669116b {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1017.251973] env[62619]: INFO nova.compute.manager [req-3e1d16c3-0ca9-40bd-bbb8-5987cc33155b req-e2b35f34-b676-4ac6-b5e4-e389f6741eea service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Neutron deleted interface ee20914c-0d83-4518-b2c4-83fdd669116b; detaching it from the instance and deleting it from the info cache [ 1017.251973] env[62619]: DEBUG nova.network.neutron [req-3e1d16c3-0ca9-40bd-bbb8-5987cc33155b req-e2b35f34-b676-4ac6-b5e4-e389f6741eea service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.405387] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.735s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1017.408493] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.245s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1017.410012] env[62619]: INFO nova.compute.claims [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1017.431058] env[62619]: INFO nova.scheduler.client.report [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Deleted allocations for instance 48f193f6-9928-4098-8830-dadda0eb11e6 [ 1017.726642] env[62619]: DEBUG nova.network.neutron [-] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1017.757270] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-8b8f1d09-dcd7-4b84-9ce7-13d30ccee506 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.766420] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-952a2dc7-e916-4bee-9e21-c4d3c7934cd8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1017.795033] env[62619]: DEBUG nova.compute.manager [req-3e1d16c3-0ca9-40bd-bbb8-5987cc33155b req-e2b35f34-b676-4ac6-b5e4-e389f6741eea service nova] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Detach interface failed, port_id=ee20914c-0d83-4518-b2c4-83fdd669116b, reason: Instance bff6a2d3-3d0c-42df-8129-f78e321d1b7e could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1017.942086] env[62619]: DEBUG oslo_concurrency.lockutils [None req-65ea1e06-a8a6-40f1-a2e0-86f630c32ac4 tempest-AttachInterfacesV270Test-1398927168 tempest-AttachInterfacesV270Test-1398927168-project-member] Lock "48f193f6-9928-4098-8830-dadda0eb11e6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 8.002s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1018.012331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "1c406554-d91d-422a-9a5a-9e910fc51103" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1018.012331] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103" acquired by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1018.012543] env[62619]: DEBUG nova.compute.manager [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Going to confirm migration 3 {{(pid=62619) do_confirm_resize /opt/stack/nova/nova/compute/manager.py:4783}} [ 1018.228806] env[62619]: INFO nova.compute.manager [-] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Took 1.41 seconds to deallocate network for instance. 
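The lock bookkeeping that dominates this stretch of the trace ("Acquiring lock ... by ...", "acquired ... :: waited 0.000s", ""released" ... :: held 1.733s") is emitted by the oslo.concurrency lockutils wrapper (the inner function at lockutils.py:402/407/421 cited in each entry) around Nova's serialized sections such as "compute_resources" and "refresh_cache-<instance uuid>". A minimal sketch of that pattern, assuming only the oslo.concurrency library; the function name below is illustrative and not taken from Nova's code:

    from oslo_concurrency import lockutils

    # Decorating a callable with lockutils.synchronized() serializes callers on a
    # named in-process lock. The generated wrapper logs how long each caller
    # waited to acquire the lock and how long it held it, which is what produces
    # the "acquired ... waited N.NNNs" / ""released" ... held N.NNNs" DEBUG lines
    # seen throughout this log.
    @lockutils.synchronized("compute_resources")
    def update_usage():
        pass  # resource-tracker style bookkeeping would run while the lock is held

    # The same primitive is available as a context manager, which is how the
    # per-instance "refresh_cache-<uuid>" critical sections in this trace read
    # (the uuid here is the instance already being tracked above):
    with lockutils.lock("refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103"):
        pass  # rebuild the instance's network info cache while holding the lock

The lock is keyed purely by its name string, so concurrent requests against the same instance or the same resource tracker line up on the same semaphore, and the waited/held durations in the log show how long each request queued behind the others.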
[ 1018.608953] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28d7457b-ae7a-4e48-86a7-412c35d5a21c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.626316] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e29fcd04-2c17-483d-81f0-64ac582933a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.661159] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1018.661256] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquired lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1018.661462] env[62619]: DEBUG nova.network.neutron [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1018.661677] env[62619]: DEBUG nova.objects.instance [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'info_cache' on Instance uuid 1c406554-d91d-422a-9a5a-9e910fc51103 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1018.665662] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-177b25f9-7331-4f22-beb4-001c23371a80 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.679061] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21c5fe5b-5e61-428f-bdff-6193a2588b94 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1018.694879] env[62619]: DEBUG nova.compute.provider_tree [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1018.738103] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1019.197374] env[62619]: DEBUG nova.scheduler.client.report [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 
tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1019.705363] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.294s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1019.705363] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1019.714724] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.975s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1019.714724] env[62619]: DEBUG nova.objects.instance [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid bff6a2d3-3d0c-42df-8129-f78e321d1b7e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1019.908428] env[62619]: DEBUG nova.network.neutron [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance_info_cache with network_info: [{"id": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "address": "fa:16:3e:6f:73:55", "network": {"id": "ca81e7fa-89de-49f2-bc00-6bf81e3e97c9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-1274105894-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "33242a5e0a764cf3b8af687fc4302e8e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "57c65f87-60fd-4882-ab30-31db49131b46", "external-id": "nsx-vlan-transportzone-610", "segmentation_id": 610, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d9c045-8c", "ovs_interfaceid": "b2d9c045-8c75-4e70-a8de-8f4587d036a1", "qbh_params": null, "qbg_params": null, "active": true, 
"vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1020.218020] env[62619]: DEBUG nova.compute.utils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1020.224411] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1020.225444] env[62619]: DEBUG nova.network.neutron [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1020.278024] env[62619]: DEBUG nova.policy [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34a14400ec56409ca356b449a9e30cf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60ccdf8f256c427b9767a01dad0616fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1020.364774] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70af2208-32c4-431b-8387-2b9ddcb05e4b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.373787] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3c325194-a3f0-4ec3-9406-438a5d5ab15a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.407095] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2913bf28-28b6-4a75-a0e4-969d42d5dfaf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.410279] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Releasing lock "refresh_cache-1c406554-d91d-422a-9a5a-9e910fc51103" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1020.410516] env[62619]: DEBUG nova.objects.instance [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lazy-loading 'migration_context' on Instance uuid 1c406554-d91d-422a-9a5a-9e910fc51103 {{(pid=62619) obj_load_attr 
/opt/stack/nova/nova/objects/instance.py:1141}} [ 1020.416921] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-244a1bf1-fa0c-4cf4-9321-d7e10bd2342c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.436788] env[62619]: DEBUG nova.compute.provider_tree [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1020.567715] env[62619]: DEBUG nova.network.neutron [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Successfully created port: 91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1020.725631] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1020.913033] env[62619]: DEBUG nova.objects.base [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Object Instance<1c406554-d91d-422a-9a5a-9e910fc51103> lazy-loaded attributes: info_cache,migration_context {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1020.914047] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3074a68a-d3a6-401c-9962-03be8a6441ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.938643] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-96676f18-d379-43da-8c63-a458a7bd4bb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1020.942695] env[62619]: DEBUG nova.scheduler.client.report [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1020.946657] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1020.946657] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f12fcb-d526-1d82-dcdc-1df7df74c853" [ 1020.946657] env[62619]: _type = "Task" [ 1020.946657] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1020.955947] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f12fcb-d526-1d82-dcdc-1df7df74c853, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1021.445781] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.733s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1021.458817] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52f12fcb-d526-1d82-dcdc-1df7df74c853, 'name': SearchDatastore_Task, 'duration_secs': 0.008646} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1021.459451] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1021.459451] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1021.470090] env[62619]: INFO nova.scheduler.client.report [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance bff6a2d3-3d0c-42df-8129-f78e321d1b7e [ 1021.735807] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1021.762131] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1021.762404] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1021.762563] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1021.762755] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1021.762918] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1021.763082] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1021.763293] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1021.763458] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1021.763700] env[62619]: DEBUG 
nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1021.763878] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1021.764074] env[62619]: DEBUG nova.virt.hardware [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1021.764934] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5873008-1964-47ff-8c21-7088a230531b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.772600] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-431d788f-2d36-435c-9bca-2344583a183d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1021.982347] env[62619]: DEBUG oslo_concurrency.lockutils [None req-98087933-b670-455a-9aba-887dd41fd335 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "bff6a2d3-3d0c-42df-8129-f78e321d1b7e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.310s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.018601] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.018882] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.049322] env[62619]: DEBUG nova.compute.manager [req-ca64fa55-d9c9-4a0a-9356-58bb02c6c7ff req-facadea0-2347-4c40-99cc-bba3e60c66ab service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Received event network-vif-plugged-91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1022.049587] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca64fa55-d9c9-4a0a-9356-58bb02c6c7ff req-facadea0-2347-4c40-99cc-bba3e60c66ab service nova] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034-events" 
by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.049675] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca64fa55-d9c9-4a0a-9356-58bb02c6c7ff req-facadea0-2347-4c40-99cc-bba3e60c66ab service nova] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.049879] env[62619]: DEBUG oslo_concurrency.lockutils [req-ca64fa55-d9c9-4a0a-9356-58bb02c6c7ff req-facadea0-2347-4c40-99cc-bba3e60c66ab service nova] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.049968] env[62619]: DEBUG nova.compute.manager [req-ca64fa55-d9c9-4a0a-9356-58bb02c6c7ff req-facadea0-2347-4c40-99cc-bba3e60c66ab service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] No waiting events found dispatching network-vif-plugged-91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1022.050149] env[62619]: WARNING nova.compute.manager [req-ca64fa55-d9c9-4a0a-9356-58bb02c6c7ff req-facadea0-2347-4c40-99cc-bba3e60c66ab service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Received unexpected event network-vif-plugged-91d6b3d0-bc47-408b-8f95-471b793c1330 for instance with vm_state building and task_state spawning. [ 1022.108426] env[62619]: DEBUG nova.network.neutron [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Successfully updated port: 91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1022.174941] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a57bc16-e995-49f8-a3dc-8054d3d533de {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.182672] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f494d95f-44e1-49df-bd31-d4f12858d766 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.216179] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4330781-e70e-4556-92cb-2ab057cc0ecd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.224266] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ef06bc6-7218-432b-96c0-b1ed78b656c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.240180] env[62619]: DEBUG nova.compute.provider_tree [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory 
/opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1022.524964] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1022.527814] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "f5560f62-634d-42e5-9354-68089db01e33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.528429] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f5560f62-634d-42e5-9354-68089db01e33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.528689] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "f5560f62-634d-42e5-9354-68089db01e33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1022.528893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f5560f62-634d-42e5-9354-68089db01e33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1022.529156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f5560f62-634d-42e5-9354-68089db01e33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1022.532916] env[62619]: INFO nova.compute.manager [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Terminating instance [ 1022.534849] env[62619]: DEBUG nova.compute.manager [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1022.535103] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1022.535974] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f1eae53b-0c63-4439-8a33-65f0bc52a330 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.543233] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1022.543537] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-e82398de-a9cb-46ec-a2f1-44a7b9c976ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1022.549722] env[62619]: DEBUG oslo_vmware.api [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1022.549722] env[62619]: value = "task-1365336" [ 1022.549722] env[62619]: _type = "Task" [ 1022.549722] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1022.557447] env[62619]: DEBUG oslo_vmware.api [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365336, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1022.611394] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1022.611557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1022.611604] env[62619]: DEBUG nova.network.neutron [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1022.744162] env[62619]: DEBUG nova.scheduler.client.report [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1023.050006] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1023.058775] env[62619]: DEBUG oslo_vmware.api [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365336, 'name': PowerOffVM_Task, 'duration_secs': 0.299119} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1023.059080] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1023.059257] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1023.059702] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5a9f935-20c6-43d5-ab30-e648ba2c4a9a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.143518] env[62619]: DEBUG nova.network.neutron [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1023.277603] env[62619]: DEBUG nova.network.neutron [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updating instance_info_cache with network_info: [{"id": "91d6b3d0-bc47-408b-8f95-471b793c1330", "address": "fa:16:3e:df:d8:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d6b3d0-bc", "ovs_interfaceid": "91d6b3d0-bc47-408b-8f95-471b793c1330", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1023.544427] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1023.544678] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 
f5560f62-634d-42e5-9354-68089db01e33] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1023.544887] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore1] f5560f62-634d-42e5-9354-68089db01e33 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1023.545183] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9c50c635-e717-4853-a331-b864f68f96a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.550949] env[62619]: DEBUG oslo_vmware.api [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1023.550949] env[62619]: value = "task-1365338" [ 1023.550949] env[62619]: _type = "Task" [ 1023.550949] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.558314] env[62619]: DEBUG oslo_vmware.api [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365338, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1023.755067] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.drop_move_claim_at_source" :: held 2.295s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1023.757906] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.708s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1023.759366] env[62619]: INFO nova.compute.claims [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1023.780620] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1023.780904] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Instance network_info: |[{"id": "91d6b3d0-bc47-408b-8f95-471b793c1330", "address": 
"fa:16:3e:df:d8:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d6b3d0-bc", "ovs_interfaceid": "91d6b3d0-bc47-408b-8f95-471b793c1330", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1023.781360] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:df:d8:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '91d6b3d0-bc47-408b-8f95-471b793c1330', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1023.788920] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Creating folder: Project (60ccdf8f256c427b9767a01dad0616fd). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1023.789224] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b720aca-1ea8-4e14-a265-a8d88a66277d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.801119] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Created folder: Project (60ccdf8f256c427b9767a01dad0616fd) in parent group-v290436. [ 1023.801321] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Creating folder: Instances. Parent ref: group-v290549. 
{{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1023.801557] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7be319a8-54f7-497b-af74-617f1658f2bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.811094] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Created folder: Instances in parent group-v290549. [ 1023.811320] env[62619]: DEBUG oslo.service.loopingcall [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1023.811557] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1023.811765] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a85a55e2-9160-48e9-a0b5-0c827776b68c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1023.829993] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1023.829993] env[62619]: value = "task-1365341" [ 1023.829993] env[62619]: _type = "Task" [ 1023.829993] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1023.837182] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365341, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.061365] env[62619]: DEBUG oslo_vmware.api [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365338, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.270704} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.061679] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1024.061888] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1024.062199] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1024.062412] env[62619]: INFO nova.compute.manager [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: f5560f62-634d-42e5-9354-68089db01e33] Took 1.53 seconds to destroy the instance on the hypervisor. [ 1024.062665] env[62619]: DEBUG oslo.service.loopingcall [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1024.062870] env[62619]: DEBUG nova.compute.manager [-] [instance: f5560f62-634d-42e5-9354-68089db01e33] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1024.062962] env[62619]: DEBUG nova.network.neutron [-] [instance: f5560f62-634d-42e5-9354-68089db01e33] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1024.074228] env[62619]: DEBUG nova.compute.manager [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Received event network-changed-91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1024.074330] env[62619]: DEBUG nova.compute.manager [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Refreshing instance network info cache due to event network-changed-91d6b3d0-bc47-408b-8f95-471b793c1330. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1024.074551] env[62619]: DEBUG oslo_concurrency.lockutils [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] Acquiring lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.074707] env[62619]: DEBUG oslo_concurrency.lockutils [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] Acquired lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.074872] env[62619]: DEBUG nova.network.neutron [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Refreshing network info cache for port 91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1024.319197] env[62619]: INFO nova.scheduler.client.report [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocation for migration 4370cfc0-cda7-4112-838d-ba30960bc35a [ 1024.340055] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365341, 'name': CreateVM_Task, 'duration_secs': 0.493234} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.340213] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1024.340775] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.340947] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.341639] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1024.341639] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-462fed61-ab5f-4681-a598-916a37734301 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.346684] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 
tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1024.346684] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529577dc-e165-a77b-61d5-13b2050d8414" [ 1024.346684] env[62619]: _type = "Task" [ 1024.346684] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.354354] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529577dc-e165-a77b-61d5-13b2050d8414, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.804083] env[62619]: DEBUG nova.network.neutron [-] [instance: f5560f62-634d-42e5-9354-68089db01e33] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.820339] env[62619]: DEBUG nova.network.neutron [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updated VIF entry in instance network info cache for port 91d6b3d0-bc47-408b-8f95-471b793c1330. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1024.820715] env[62619]: DEBUG nova.network.neutron [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updating instance_info_cache with network_info: [{"id": "91d6b3d0-bc47-408b-8f95-471b793c1330", "address": "fa:16:3e:df:d8:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d6b3d0-bc", "ovs_interfaceid": "91d6b3d0-bc47-408b-8f95-471b793c1330", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1024.825325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103" "released" by "nova.compute.manager.ComputeManager.confirm_resize..do_confirm_resize" :: held 6.813s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1024.859028] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 
tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529577dc-e165-a77b-61d5-13b2050d8414, 'name': SearchDatastore_Task, 'duration_secs': 0.010165} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1024.859028] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1024.859342] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1024.859467] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1024.859643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1024.859796] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1024.860072] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-06dca3aa-a282-494f-9f2c-9e78dd7362d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.870274] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1024.870453] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1024.871159] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a933c13e-5a59-424b-b673-5b5249414fe0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.878301] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1024.878301] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b4e678-92d0-a69b-b060-ee2e0835c413" [ 1024.878301] env[62619]: _type = "Task" [ 1024.878301] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1024.886118] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b4e678-92d0-a69b-b060-ee2e0835c413, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1024.906253] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bce13855-acad-4534-b1d8-4bfba2f95915 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.912708] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a6036b0-8343-4645-bd81-2c879053fd35 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.942783] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0556ebea-5b50-4839-a67d-0039bbb3d606 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.949920] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c4cfbbc-2cd6-4043-8cfb-a29c17a34877 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1024.963367] env[62619]: DEBUG nova.compute.provider_tree [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1025.306954] env[62619]: INFO nova.compute.manager [-] [instance: f5560f62-634d-42e5-9354-68089db01e33] Took 1.24 seconds to deallocate network for instance. 
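The PowerOffVM_Task, UnregisterVM, DeleteDatastoreFile_Task, CreateVM_Task and SearchDatastore_Task entries above all follow the same wait-for-task pattern: a vSphere task is submitted, then polled, logging "progress is N%" until it reports completion. Below is a minimal, self-contained sketch of that polling loop only; it is not Nova's or oslo.vmware's implementation, and fetch_task_info is a hypothetical callable standing in for a vSphere TaskInfo lookup, with only the fields read here (state, progress, error) assumed.

# Minimal sketch of the poll-until-done loop behind the
# "Waiting for the task ... progress is N% ... completed successfully"
# entries in this log. Assumption: fetch_task_info(task_ref) returns a dict
# with 'state' ('running' | 'success' | 'error'), 'progress' and 'error'.
import logging
import time

LOG = logging.getLogger(__name__)


class TaskFailed(Exception):
    """Raised when the remote task ends in an error state."""


def wait_for_task(task_ref, fetch_task_info, poll_interval=0.5):
    """Poll task_ref until it reaches a terminal state and return its info."""
    LOG.debug("Waiting for the task: %s to complete.", task_ref)
    while True:
        info = fetch_task_info(task_ref)
        if info["state"] == "success":
            LOG.debug("Task: %s completed successfully.", task_ref)
            return info
        if info["state"] == "error":
            raise TaskFailed(info.get("error", "unknown error"))
        LOG.debug("Task: %s progress is %s%%.", task_ref, info.get("progress", 0))
        time.sleep(poll_interval)

With a real session, fetch_task_info would wrap a property read of the task's TaskInfo object, and poll_interval would play the role of the driver's task polling interval; the durations reported above ('duration_secs': 0.27 to 0.84) are simply the wall-clock time between submission and the first poll that sees success.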
[ 1025.323595] env[62619]: DEBUG oslo_concurrency.lockutils [req-186b901f-84a7-4356-beb3-5e0d747964fe req-016e83c9-eff6-4cba-a77f-79aba13b6fc7 service nova] Releasing lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.390073] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52b4e678-92d0-a69b-b060-ee2e0835c413, 'name': SearchDatastore_Task, 'duration_secs': 0.011019} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.390997] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6af2d59f-c8fe-4e53-aac5-593e06f2ad0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.396455] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1025.396455] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5227ba26-56f3-dafc-2b02-8ecbf53299c3" [ 1025.396455] env[62619]: _type = "Task" [ 1025.396455] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.404664] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5227ba26-56f3-dafc-2b02-8ecbf53299c3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.466708] env[62619]: DEBUG nova.scheduler.client.report [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1025.813417] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1025.907068] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5227ba26-56f3-dafc-2b02-8ecbf53299c3, 'name': SearchDatastore_Task, 'duration_secs': 0.0409} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1025.907068] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1025.907459] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 460c12f6-5eb8-427c-bda4-6773e1bc9034/460c12f6-5eb8-427c-bda4-6773e1bc9034.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1025.907764] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-815d3a08-f24b-49c4-bf2f-0206482b8713 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1025.913690] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1025.913690] env[62619]: value = "task-1365342" [ 1025.913690] env[62619]: _type = "Task" [ 1025.913690] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1025.921063] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365342, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1025.972238] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.214s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1025.972898] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1025.975631] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.162s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1025.975853] env[62619]: DEBUG nova.objects.instance [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid f5560f62-634d-42e5-9354-68089db01e33 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1026.036844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "1c406554-d91d-422a-9a5a-9e910fc51103" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.037822] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.037822] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1026.037822] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f 
tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1026.037822] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1026.040366] env[62619]: INFO nova.compute.manager [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Terminating instance [ 1026.042532] env[62619]: DEBUG nova.compute.manager [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1026.042705] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1026.043644] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72ec9f31-ac14-4097-b46e-121870555ce2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.051332] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1026.051602] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-f85da396-7794-481f-953d-05029b7e2027 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.058012] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1026.058012] env[62619]: value = "task-1365343" [ 1026.058012] env[62619]: _type = "Task" [ 1026.058012] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.066690] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365343, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.101284] env[62619]: DEBUG nova.compute.manager [req-9247de4f-ae76-4815-a9ef-7339dd0be1fc req-f22f2041-8a81-4242-9d3d-4230f85ad26e service nova] [instance: f5560f62-634d-42e5-9354-68089db01e33] Received event network-vif-deleted-e905f2dd-5836-45a9-b32b-497591e07391 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1026.424295] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365342, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.497969} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.424681] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 460c12f6-5eb8-427c-bda4-6773e1bc9034/460c12f6-5eb8-427c-bda4-6773e1bc9034.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1026.424856] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1026.425156] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-25efefaf-217a-43fd-aa67-1d0b97053747 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.431459] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1026.431459] env[62619]: value = "task-1365344" [ 1026.431459] env[62619]: _type = "Task" [ 1026.431459] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.439281] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365344, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.482976] env[62619]: DEBUG nova.compute.utils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1026.485249] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1026.485249] env[62619]: DEBUG nova.network.neutron [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1026.525889] env[62619]: DEBUG nova.policy [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '5664407959384f1890a3bf8b2c7febcb', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'f414a82396e64d6e8b99e330b0f78112', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1026.567981] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365343, 'name': PowerOffVM_Task, 'duration_secs': 0.427803} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.570398] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1026.570656] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1026.571159] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c5175603-89c1-40c9-8225-5e3da9d79b7d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.613509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3087012b-f70d-4d86-a45f-171e4b418e7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.620512] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-48864657-a297-46c1-b291-df6e227112a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.652638] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f4eee6bd-3b1f-413f-8a6c-4f4dcb931383 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.655431] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f 
tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1026.655636] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1026.655818] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleting the datastore file [datastore1] 1c406554-d91d-422a-9a5a-9e910fc51103 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1026.656091] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-629e353b-b391-471f-b248-70fc59910b66 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.663156] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-990e465a-82b7-4f9e-9a41-1d518b4c694b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.666961] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for the task: (returnval){ [ 1026.666961] env[62619]: value = "task-1365346" [ 1026.666961] env[62619]: _type = "Task" [ 1026.666961] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.679105] env[62619]: DEBUG nova.compute.provider_tree [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1026.685287] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365346, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1026.778794] env[62619]: DEBUG nova.network.neutron [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Successfully created port: b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1026.942825] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365344, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.080482} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1026.943315] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1026.943979] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-987013aa-dcee-487b-96f9-67a13ca5d62b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.966101] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] 460c12f6-5eb8-427c-bda4-6773e1bc9034/460c12f6-5eb8-427c-bda4-6773e1bc9034.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1026.966391] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-8cff0175-1b86-4239-aff3-d64102d8bd1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1026.985898] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1026.985898] env[62619]: value = "task-1365347" [ 1026.985898] env[62619]: _type = "Task" [ 1026.985898] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1026.989144] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1026.996832] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365347, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.176718] env[62619]: DEBUG oslo_vmware.api [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Task: {'id': task-1365346, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.200581} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.176899] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1027.177124] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1027.177291] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1027.177470] env[62619]: INFO nova.compute.manager [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1027.177713] env[62619]: DEBUG oslo.service.loopingcall [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1027.177917] env[62619]: DEBUG nova.compute.manager [-] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1027.178022] env[62619]: DEBUG nova.network.neutron [-] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1027.181901] env[62619]: DEBUG nova.scheduler.client.report [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1027.496443] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365347, 'name': ReconfigVM_Task} progress is 14%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1027.686704] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.711s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1027.710122] env[62619]: INFO nova.scheduler.client.report [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance f5560f62-634d-42e5-9354-68089db01e33 [ 1027.899247] env[62619]: DEBUG nova.network.neutron [-] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1027.995875] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365347, 'name': ReconfigVM_Task, 'duration_secs': 0.841953} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1027.996219] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Reconfigured VM instance instance-00000065 to attach disk [datastore1] 460c12f6-5eb8-427c-bda4-6773e1bc9034/460c12f6-5eb8-427c-bda4-6773e1bc9034.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1027.996869] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-68a944ea-a5cf-4391-ac8d-62cb4424b573 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.002787] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1028.006092] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1028.006092] env[62619]: value = "task-1365348" [ 1028.006092] env[62619]: _type = "Task" [ 1028.006092] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.015231] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365348, 'name': Rename_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.032452] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1028.032698] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1028.032859] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1028.033056] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1028.033209] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1028.033363] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1028.033578] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1028.033783] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Build 
topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1028.033980] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1028.034156] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1028.034331] env[62619]: DEBUG nova.virt.hardware [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1028.035195] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e943dd98-75e2-4305-8d6d-c54181abc3ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.042918] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c31775dd-d981-4580-a7b7-514559f7c3e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.126080] env[62619]: DEBUG nova.compute.manager [req-f4957d73-5fed-4443-89a9-811458be487d req-b5307b5d-0239-4aca-ad83-62e926d0cbb3 service nova] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Received event network-vif-deleted-b2d9c045-8c75-4e70-a8de-8f4587d036a1 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1028.217183] env[62619]: DEBUG oslo_concurrency.lockutils [None req-1e9724bd-9ff9-491c-8b37-95026caca349 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "f5560f62-634d-42e5-9354-68089db01e33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.689s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.251937] env[62619]: DEBUG nova.network.neutron [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Successfully updated port: b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1028.401448] env[62619]: INFO nova.compute.manager [-] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Took 1.22 seconds to deallocate network for instance. [ 1028.516511] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365348, 'name': Rename_Task, 'duration_secs': 0.180712} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1028.516897] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1028.517053] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-1d0bddfb-26fc-4fb2-a24a-d19ede8f8d88 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1028.525173] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1028.525173] env[62619]: value = "task-1365349" [ 1028.525173] env[62619]: _type = "Task" [ 1028.525173] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1028.532508] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365349, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1028.754986] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "refresh_cache-c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1028.755179] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquired lock "refresh_cache-c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1028.755335] env[62619]: DEBUG nova.network.neutron [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1028.908530] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1028.908919] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1028.909179] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1028.934913] env[62619]: INFO nova.scheduler.client.report [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Deleted allocations for instance 1c406554-d91d-422a-9a5a-9e910fc51103 [ 1029.034994] env[62619]: DEBUG oslo_vmware.api [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365349, 'name': PowerOnVM_Task, 'duration_secs': 0.460498} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1029.035286] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1029.035491] env[62619]: INFO nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Took 7.30 seconds to spawn the instance on the hypervisor. [ 1029.035672] env[62619]: DEBUG nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1029.036452] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08848f31-599a-47bc-8264-389fbda82ed2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.285616] env[62619]: DEBUG nova.network.neutron [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1029.404035] env[62619]: DEBUG nova.network.neutron [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Updating instance_info_cache with network_info: [{"id": "b991507b-e917-4e36-9058-bf35a5777706", "address": "fa:16:3e:25:68:f6", "network": {"id": "efd44887-37f3-4d75-a9ec-6c8b79a50893", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-48261007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f414a82396e64d6e8b99e330b0f78112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "170f3b82-5915-4e36-bce9-4664ebb6be5e", "external-id": "nsx-vlan-transportzone-33", "segmentation_id": 33, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb991507b-e9", "ovs_interfaceid": "b991507b-e917-4e36-9058-bf35a5777706", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1029.443023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-9182df95-c741-4915-8a59-0a3e1ce3f04f tempest-DeleteServersTestJSON-1341222198 tempest-DeleteServersTestJSON-1341222198-project-member] Lock "1c406554-d91d-422a-9a5a-9e910fc51103" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.406s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1029.551882] env[62619]: INFO nova.compute.manager [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Took 12.41 seconds to build instance. 
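The recurring "Acquiring lock ... / Lock ... acquired ... :: waited Ns / ... "released" ... :: held Ns" triples in these entries (lockutils.py:402/407/421, function "inner") are produced by oslo.concurrency's synchronized decorator, while the single Acquiring/Acquired/Releasing entries at lockutils.py:310/313/331 come from the lockutils.lock() context-manager form of the same primitive. Below is a minimal, self-contained sketch of the decorator form; the function name is a hypothetical stand-in for Nova methods such as ResourceTracker.update_usage, and the sketch only illustrates the locking pattern, it is not Nova code.

    import logging

    from oslo_concurrency import lockutils

    # Show the oslo_concurrency.lockutils DEBUG lines on stderr.
    logging.basicConfig(level=logging.DEBUG)

    # Hypothetical stand-in for methods such as ResourceTracker.update_usage;
    # the decorator serializes callers on the named lock and emits the
    # "Acquiring lock ... / acquired ... :: waited Ns / released ... :: held Ns"
    # DEBUG triple seen throughout this log.
    @lockutils.synchronized('compute_resources')
    def update_usage_example():
        # Critical section: only one caller in this process holds
        # "compute_resources" at a time (external=True would add a
        # cross-process file lock).
        return 'usage updated'

    if __name__ == '__main__':
        print(update_usage_example())

Run as a script, this prints 'usage updated' and, because DEBUG logging is enabled, the same acquired/released lines that appear against "compute_resources", instance UUIDs, and "refresh_cache-..." names throughout the entries above.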
[ 1029.723660] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "27280d82-22ce-4312-8ff2-216d4364a889" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1029.723959] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "27280d82-22ce-4312-8ff2-216d4364a889" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1029.906383] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Releasing lock "refresh_cache-c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1029.906714] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Instance network_info: |[{"id": "b991507b-e917-4e36-9058-bf35a5777706", "address": "fa:16:3e:25:68:f6", "network": {"id": "efd44887-37f3-4d75-a9ec-6c8b79a50893", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-48261007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f414a82396e64d6e8b99e330b0f78112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "170f3b82-5915-4e36-bce9-4664ebb6be5e", "external-id": "nsx-vlan-transportzone-33", "segmentation_id": 33, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb991507b-e9", "ovs_interfaceid": "b991507b-e917-4e36-9058-bf35a5777706", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1029.907169] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:25:68:f6', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '170f3b82-5915-4e36-bce9-4664ebb6be5e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b991507b-e917-4e36-9058-bf35a5777706', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1029.914463] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Creating folder: Project (f414a82396e64d6e8b99e330b0f78112). Parent ref: group-v290436. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.914740] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-123063c1-039a-4fc8-829a-4a57f9bf5a38 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.925403] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Created folder: Project (f414a82396e64d6e8b99e330b0f78112) in parent group-v290436. [ 1029.925581] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Creating folder: Instances. Parent ref: group-v290552. {{(pid=62619) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 1029.925827] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-79d10b92-28d8-42dd-9fc5-83b0a9a1944b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.934557] env[62619]: INFO nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Created folder: Instances in parent group-v290552. [ 1029.934788] env[62619]: DEBUG oslo.service.loopingcall [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1029.934973] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1029.935185] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-91bdf2cf-141c-414b-a7ba-7a5e423f0733 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1029.953968] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1029.953968] env[62619]: value = "task-1365352" [ 1029.953968] env[62619]: _type = "Task" [ 1029.953968] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1029.961507] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365352, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.054270] env[62619]: DEBUG oslo_concurrency.lockutils [None req-52e4fe1b-340b-4077-8b28-3eda8dbf8994 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.921s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.153180] env[62619]: DEBUG nova.compute.manager [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Received event network-vif-plugged-b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.153416] env[62619]: DEBUG oslo_concurrency.lockutils [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] Acquiring lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.153626] env[62619]: DEBUG oslo_concurrency.lockutils [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.154108] env[62619]: DEBUG oslo_concurrency.lockutils [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1030.154379] env[62619]: DEBUG nova.compute.manager [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] No waiting events found dispatching network-vif-plugged-b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1030.154622] env[62619]: WARNING nova.compute.manager [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Received unexpected event network-vif-plugged-b991507b-e917-4e36-9058-bf35a5777706 for instance with vm_state building and task_state spawning. [ 1030.154967] env[62619]: DEBUG nova.compute.manager [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Received event network-changed-b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.155061] env[62619]: DEBUG nova.compute.manager [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Refreshing instance network info cache due to event network-changed-b991507b-e917-4e36-9058-bf35a5777706. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1030.155694] env[62619]: DEBUG oslo_concurrency.lockutils [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] Acquiring lock "refresh_cache-c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.155923] env[62619]: DEBUG oslo_concurrency.lockutils [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] Acquired lock "refresh_cache-c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.156206] env[62619]: DEBUG nova.network.neutron [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Refreshing network info cache for port b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1030.226444] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1030.464604] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365352, 'name': CreateVM_Task, 'duration_secs': 0.333642} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.464864] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1030.465503] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.465683] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.466359] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1030.466359] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ffa1679-3b22-4d4d-98ee-45e5cd39ba41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.470799] env[62619]: DEBUG oslo_vmware.api [None 
req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1030.470799] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52939ed2-d755-cb0d-8e3b-4768cd177c61" [ 1030.470799] env[62619]: _type = "Task" [ 1030.470799] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1030.478502] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52939ed2-d755-cb0d-8e3b-4768cd177c61, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1030.535521] env[62619]: DEBUG nova.compute.manager [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Received event network-changed-91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1030.535743] env[62619]: DEBUG nova.compute.manager [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Refreshing instance network info cache due to event network-changed-91d6b3d0-bc47-408b-8f95-471b793c1330. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1030.535870] env[62619]: DEBUG oslo_concurrency.lockutils [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] Acquiring lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.536242] env[62619]: DEBUG oslo_concurrency.lockutils [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] Acquired lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.536389] env[62619]: DEBUG nova.network.neutron [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Refreshing network info cache for port 91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1030.730243] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.730551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s 
{{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.750581] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1030.750852] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1030.752371] env[62619]: INFO nova.compute.claims [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1030.834953] env[62619]: DEBUG nova.objects.instance [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lazy-loading 'flavor' on Instance uuid 69e916ee-9edc-4e1b-85a0-40142364e3bb {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1030.982594] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52939ed2-d755-cb0d-8e3b-4768cd177c61, 'name': SearchDatastore_Task, 'duration_secs': 0.009496} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1030.985214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1030.985214] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1030.985214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1030.985214] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1030.985214] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1030.985214] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c3c910e-aafb-49ad-878b-10b210971684 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.992954] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1030.993282] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1030.993997] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0906f3ca-ae9f-4319-9992-39853e6b1082 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1030.999363] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1030.999363] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52bad875-ce40-ddb8-1524-07b73fc0b344" [ 1030.999363] env[62619]: _type = "Task" [ 1030.999363] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.007954] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52bad875-ce40-ddb8-1524-07b73fc0b344, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.235726] env[62619]: DEBUG nova.compute.utils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1031.243701] env[62619]: DEBUG nova.network.neutron [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Updated VIF entry in instance network info cache for port b991507b-e917-4e36-9058-bf35a5777706. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1031.245016] env[62619]: DEBUG nova.network.neutron [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Updating instance_info_cache with network_info: [{"id": "b991507b-e917-4e36-9058-bf35a5777706", "address": "fa:16:3e:25:68:f6", "network": {"id": "efd44887-37f3-4d75-a9ec-6c8b79a50893", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-48261007-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "f414a82396e64d6e8b99e330b0f78112", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "170f3b82-5915-4e36-bce9-4664ebb6be5e", "external-id": "nsx-vlan-transportzone-33", "segmentation_id": 33, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb991507b-e9", "ovs_interfaceid": "b991507b-e917-4e36-9058-bf35a5777706", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.339951] env[62619]: DEBUG oslo_concurrency.lockutils [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1031.346024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1031.510049] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52bad875-ce40-ddb8-1524-07b73fc0b344, 'name': SearchDatastore_Task, 'duration_secs': 0.00837} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1031.511061] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-64dd0c20-4ec1-4fc9-ab73-296337b9eb84 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.517257] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1031.517257] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b416d-cce0-7eea-25ae-eb56f569d055" [ 1031.517257] env[62619]: _type = "Task" [ 1031.517257] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1031.527028] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b416d-cce0-7eea-25ae-eb56f569d055, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1031.561720] env[62619]: DEBUG nova.network.neutron [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updated VIF entry in instance network info cache for port 91d6b3d0-bc47-408b-8f95-471b793c1330. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1031.562117] env[62619]: DEBUG nova.network.neutron [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updating instance_info_cache with network_info: [{"id": "91d6b3d0-bc47-408b-8f95-471b793c1330", "address": "fa:16:3e:df:d8:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.252", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap91d6b3d0-bc", "ovs_interfaceid": "91d6b3d0-bc47-408b-8f95-471b793c1330", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1031.738797] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 
tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1031.750114] env[62619]: DEBUG oslo_concurrency.lockutils [req-4d8c85ea-5d9d-49c4-83a0-00d777db4025 req-47e8315d-67a8-4e9a-a4d3-8acbea817a3f service nova] Releasing lock "refresh_cache-c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1031.918744] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cec396fb-04bd-4c55-a6f4-d67d8851313d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.926754] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d905b710-a64b-4bca-bfa5-87c9bbda8e77 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.963420] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f7388a-5fb9-49c3-8653-1bd38fdc53c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.968335] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0262cd1b-6871-4ced-a4f8-ac79950b0a4b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1031.982360] env[62619]: DEBUG nova.compute.provider_tree [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1031.984275] env[62619]: DEBUG nova.network.neutron [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1032.029251] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]525b416d-cce0-7eea-25ae-eb56f569d055, 'name': SearchDatastore_Task, 'duration_secs': 0.014359} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.029520] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.029770] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] c0cbe0ed-94e0-4d02-ae7a-2589938f4c41/c0cbe0ed-94e0-4d02-ae7a-2589938f4c41.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1032.030129] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-97a53be8-d653-4209-b7b6-648cee8d4ca6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.037668] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1032.037668] env[62619]: value = "task-1365353" [ 1032.037668] env[62619]: _type = "Task" [ 1032.037668] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.045460] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365353, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.064769] env[62619]: DEBUG oslo_concurrency.lockutils [req-391f6031-1c35-4653-9f0e-4fa0f23a3bde req-e7136661-fb4f-4122-8bab-51ab5990cd60 service nova] Releasing lock "refresh_cache-460c12f6-5eb8-427c-bda4-6773e1bc9034" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1032.179044] env[62619]: DEBUG nova.compute.manager [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1032.179292] env[62619]: DEBUG nova.compute.manager [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing instance network info cache due to event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1032.179563] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1032.487643] env[62619]: DEBUG nova.scheduler.client.report [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1032.549667] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365353, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.467978} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1032.549667] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] c0cbe0ed-94e0-4d02-ae7a-2589938f4c41/c0cbe0ed-94e0-4d02-ae7a-2589938f4c41.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1032.549956] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1032.550170] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-ab20e5b2-6d7f-4632-b903-e13c976b4747 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.557111] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1032.557111] env[62619]: value = "task-1365354" [ 1032.557111] env[62619]: _type = "Task" [ 1032.557111] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1032.564754] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365354, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1032.751087] env[62619]: DEBUG nova.network.neutron [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1032.832609] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1032.832879] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1032.833136] env[62619]: INFO nova.compute.manager [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Attaching volume 1192b48d-685e-4291-a41c-ab6536cdc58e to /dev/sdb [ 1032.868798] env[62619]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92e6e8ea-744b-437d-85dc-79a35488af27 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.875841] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af368c26-d1aa-4b7e-bbb7-0f01500dae05 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1032.890464] env[62619]: DEBUG nova.virt.block_device [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating existing volume attachment record: 6fde9924-6d1d-4866-bae5-20a43b7580ed {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1032.996304] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.245s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1032.996611] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1033.064015] env[62619]: DEBUG nova.objects.instance [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lazy-loading 'flavor' on Instance uuid 69e916ee-9edc-4e1b-85a0-40142364e3bb {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1033.068820] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365354, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.316664} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.069128] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1033.069953] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1434c8e2-7aaf-4abc-a021-1cfca79019c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.092615] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Reconfiguring VM instance instance-00000066 to attach disk [datastore1] c0cbe0ed-94e0-4d02-ae7a-2589938f4c41/c0cbe0ed-94e0-4d02-ae7a-2589938f4c41.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1033.093978] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-baddba0e-280c-48d1-944f-19c4466f4b23 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.113763] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1033.113763] env[62619]: value = "task-1365356" [ 1033.113763] env[62619]: _type = "Task" [ 1033.113763] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.121978] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365356, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.255137] env[62619]: DEBUG oslo_concurrency.lockutils [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1033.255204] env[62619]: DEBUG nova.compute.manager [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Inject network info {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1033.255479] env[62619]: DEBUG nova.compute.manager [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] network_info to inject: |[{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1033.261308] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Reconfiguring VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1033.261735] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1033.261935] env[62619]: DEBUG nova.network.neutron [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing network info cache for port 
b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1033.263232] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-ddfc3186-3a2a-4b1f-acd1-e47b940d00d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.286958] env[62619]: DEBUG oslo_vmware.api [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 1033.286958] env[62619]: value = "task-1365359" [ 1033.286958] env[62619]: _type = "Task" [ 1033.286958] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.298934] env[62619]: DEBUG oslo_vmware.api [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365359, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.502462] env[62619]: DEBUG nova.compute.utils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1033.503523] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1033.503611] env[62619]: DEBUG nova.network.neutron [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1033.511822] env[62619]: DEBUG nova.network.neutron [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updated VIF entry in instance network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1033.512681] env[62619]: DEBUG nova.network.neutron [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}, {"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1033.549660] env[62619]: DEBUG nova.policy [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1033.573483] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1033.624142] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365356, 'name': ReconfigVM_Task, 'duration_secs': 0.277819} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.624521] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Reconfigured VM instance instance-00000066 to attach disk [datastore1] c0cbe0ed-94e0-4d02-ae7a-2589938f4c41/c0cbe0ed-94e0-4d02-ae7a-2589938f4c41.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1033.625174] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-a15a43c6-44d7-4258-b02d-3401d398587b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1033.631628] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1033.631628] env[62619]: value = "task-1365360" [ 1033.631628] env[62619]: _type = "Task" [ 1033.631628] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1033.639667] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365360, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1033.803127] env[62619]: DEBUG oslo_vmware.api [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365359, 'name': ReconfigVM_Task, 'duration_secs': 0.195971} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1033.804346] env[62619]: DEBUG nova.network.neutron [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Successfully created port: 8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1033.807208] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-552728a1-5938-462d-9005-9cf3145d8010 tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Reconfigured VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1034.007266] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1034.016750] env[62619]: DEBUG oslo_concurrency.lockutils [req-13ca2b58-9ba2-4a22-a1dc-8885b6e6e735 req-bb23ba2c-739f-4a09-9a39-f6af9326ff41 service nova] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1034.017166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1034.141170] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365360, 'name': Rename_Task, 'duration_secs': 0.138134} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.141467] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1034.141711] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-72e5187e-60e4-411f-8843-4b4ae528769d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1034.147383] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1034.147383] env[62619]: value = "task-1365361" [ 1034.147383] env[62619]: _type = "Task" [ 1034.147383] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1034.154649] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365361, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1034.310058] env[62619]: DEBUG nova.network.neutron [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1034.336797] env[62619]: DEBUG nova.compute.manager [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1034.337149] env[62619]: DEBUG nova.compute.manager [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing instance network info cache due to event network-changed-b8ca3257-e811-48cd-ac4a-662b49bf41f3. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1034.337414] env[62619]: DEBUG oslo_concurrency.lockutils [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] Acquiring lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1034.659611] env[62619]: DEBUG oslo_vmware.api [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365361, 'name': PowerOnVM_Task, 'duration_secs': 0.435675} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1034.659870] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1034.660150] env[62619]: INFO nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Took 6.66 seconds to spawn the instance on the hypervisor. 
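The PowerOnVM_Task sequence above follows the same invoke-then-poll pattern used for every vCenter call in this log: a *_Task method is invoked through the VMware API session, and the task is polled (the _poll_task lines at oslo_vmware/api.py:434 and :444, "progress is N%" then "completed successfully") until it finishes. A minimal sketch of that pattern with oslo.vmware is given here for reference; it is an illustrative example only, the credentials are placeholders, and vm_ref stands in for the managed-object reference that the driver resolves for the instance.

    from oslo_vmware import api

    # Placeholder connection values; in this log the session is created by
    # VMwareVCDriver from the [vmware] options in nova.conf.
    session = api.VMwareAPISession(
        'vc.example.test', 'user', 'secret',
        api_retry_count=10, task_poll_interval=0.5)

    def power_on(session, vm_ref):
        """Invoke PowerOnVM_Task and block until vCenter reports completion.

        invoke_api() issues the SOAP call (the "Invoking
        VirtualMachine.PowerOnVM_Task" lines), and wait_for_task() polls the
        returned task object, raising if vCenter reports an error.
        """
        task = session.invoke_api(session.vim, 'PowerOnVM_Task', vm_ref)
        session.wait_for_task(task)

The CopyVirtualDisk_Task, ExtendVirtualDisk_Task, ReconfigVM_Task and Rename_Task steps earlier in the spawn sequence use the same wait_for_task helper, which is why each of them appears in the log as a "Waiting for the task" block followed by periodic progress polls.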
[ 1034.660397] env[62619]: DEBUG nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1034.661423] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a48424c-9769-4290-8623-3941314fc807 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.019174] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1035.055310] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1035.055591] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1035.055755] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1035.055976] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1035.056174] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1035.056331] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 1035.056543] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1035.056709] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1035.056879] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1035.057072] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1035.057257] env[62619]: DEBUG nova.virt.hardware [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1035.058132] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f83e732f-b1a0-4e4b-a817-b37939643350 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.065933] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17286850-863d-4acf-8f17-835d769939ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.070411] env[62619]: DEBUG nova.network.neutron [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", 
"segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1035.185063] env[62619]: INFO nova.compute.manager [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Took 12.15 seconds to build instance. [ 1035.319196] env[62619]: DEBUG nova.network.neutron [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Successfully updated port: 8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1035.573994] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1035.574351] env[62619]: DEBUG nova.compute.manager [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Inject network info {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7219}} [ 1035.574640] env[62619]: DEBUG nova.compute.manager [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] network_info to inject: |[{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _inject_network_info /opt/stack/nova/nova/compute/manager.py:7220}} [ 1035.579513] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None 
req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Reconfiguring VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1796}} [ 1035.579701] env[62619]: DEBUG oslo_concurrency.lockutils [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] Acquired lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.579777] env[62619]: DEBUG nova.network.neutron [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Refreshing network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1035.581038] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9abb5996-931f-4efb-b2c3-b1d0af1433fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.599847] env[62619]: DEBUG oslo_vmware.api [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 1035.599847] env[62619]: value = "task-1365363" [ 1035.599847] env[62619]: _type = "Task" [ 1035.599847] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.608053] env[62619]: DEBUG oslo_vmware.api [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365363, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.686124] env[62619]: DEBUG oslo_concurrency.lockutils [None req-573f4ea4-2028-464b-a480-152ddbf07a2a tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.667s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.716844] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "69e916ee-9edc-4e1b-85a0-40142364e3bb" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.717299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.717577] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "69e916ee-9edc-4e1b-85a0-40142364e3bb-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1035.717819] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1035.718062] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1035.720518] env[62619]: INFO nova.compute.manager [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Terminating instance [ 1035.722602] env[62619]: DEBUG nova.compute.manager [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Start destroying the instance on 
the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1035.723190] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1035.723894] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-97175816-8109-4119-8fb9-dd4876ee24a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.733128] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1035.733285] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11bd4cf3-9be9-422d-b632-f467ac3e23a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1035.738840] env[62619]: DEBUG oslo_vmware.api [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 1035.738840] env[62619]: value = "task-1365364" [ 1035.738840] env[62619]: _type = "Task" [ 1035.738840] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1035.746308] env[62619]: DEBUG oslo_vmware.api [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365364, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1035.822206] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-27280d82-22ce-4312-8ff2-216d4364a889" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1035.822361] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-27280d82-22ce-4312-8ff2-216d4364a889" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1035.822525] env[62619]: DEBUG nova.network.neutron [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1036.109314] env[62619]: DEBUG oslo_vmware.api [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365363, 'name': ReconfigVM_Task, 'duration_secs': 0.17777} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.109730] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8698f222-2b95-41f2-bed3-3b4c4897f5aa tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Reconfigured VM instance to set the machine id {{(pid=62619) _set_machine_id /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1799}} [ 1036.248675] env[62619]: DEBUG oslo_vmware.api [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365364, 'name': PowerOffVM_Task, 'duration_secs': 0.435387} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.248961] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1036.249257] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1036.249526] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-95224ef6-233a-4db4-870a-00d084b62680 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.319471] env[62619]: DEBUG nova.network.neutron [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updated VIF entry in instance network info cache for port b8ca3257-e811-48cd-ac4a-662b49bf41f3. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1036.319875] env[62619]: DEBUG nova.network.neutron [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [{"id": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "address": "fa:16:3e:a6:f3:c6", "network": {"id": "fb149fa2-123b-42b5-8a4c-bd9f63d5cca1", "bridge": "br-int", "label": "tempest-AttachInterfacesUnderV243Test-1334895141-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.237", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "61cbb5aaa69e48b4a6c820b898bbe734", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4406a73e-2189-46ac-9e96-4f0af80b5094", "external-id": "nsx-vlan-transportzone-601", "segmentation_id": 601, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb8ca3257-e8", "ovs_interfaceid": "b8ca3257-e811-48cd-ac4a-662b49bf41f3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.334339] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1036.334571] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None 
req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1036.334757] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Deleting the datastore file [datastore1] 69e916ee-9edc-4e1b-85a0-40142364e3bb {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1036.335085] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6e01cd68-1e47-44ac-af84-694c3cb164bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.341555] env[62619]: DEBUG oslo_vmware.api [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for the task: (returnval){ [ 1036.341555] env[62619]: value = "task-1365366" [ 1036.341555] env[62619]: _type = "Task" [ 1036.341555] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.349445] env[62619]: DEBUG oslo_vmware.api [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365366, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.355801] env[62619]: DEBUG nova.network.neutron [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1036.364320] env[62619]: DEBUG nova.compute.manager [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Received event network-vif-plugged-8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.364595] env[62619]: DEBUG oslo_concurrency.lockutils [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] Acquiring lock "27280d82-22ce-4312-8ff2-216d4364a889-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.364877] env[62619]: DEBUG oslo_concurrency.lockutils [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] Lock "27280d82-22ce-4312-8ff2-216d4364a889-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.365147] env[62619]: DEBUG oslo_concurrency.lockutils [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] Lock "27280d82-22ce-4312-8ff2-216d4364a889-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.365381] env[62619]: DEBUG nova.compute.manager [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] No waiting events found dispatching network-vif-plugged-8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1036.365596] env[62619]: WARNING nova.compute.manager [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Received unexpected event network-vif-plugged-8d9fe820-185c-4a3a-9436-2493bb0a0cb3 for instance with vm_state building and task_state spawning. [ 1036.365803] env[62619]: DEBUG nova.compute.manager [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Received event network-changed-8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1036.366009] env[62619]: DEBUG nova.compute.manager [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Refreshing instance network info cache due to event network-changed-8d9fe820-185c-4a3a-9436-2493bb0a0cb3. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1036.366237] env[62619]: DEBUG oslo_concurrency.lockutils [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] Acquiring lock "refresh_cache-27280d82-22ce-4312-8ff2-216d4364a889" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1036.458699] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.459017] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.459250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1036.459511] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1036.459801] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1036.462486] env[62619]: INFO nova.compute.manager [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Terminating instance [ 1036.466148] env[62619]: DEBUG nova.compute.manager [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1036.466409] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1036.467453] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d76b87a-8981-4d38-9255-6900682e1718 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.476045] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1036.476352] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-4251d337-29bf-4a75-a1ee-ce6a7408e181 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1036.483369] env[62619]: DEBUG oslo_vmware.api [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1036.483369] env[62619]: value = "task-1365367" [ 1036.483369] env[62619]: _type = "Task" [ 1036.483369] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1036.487858] env[62619]: DEBUG nova.network.neutron [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Updating instance_info_cache with network_info: [{"id": "8d9fe820-185c-4a3a-9436-2493bb0a0cb3", "address": "fa:16:3e:60:9a:45", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d9fe820-18", "ovs_interfaceid": "8d9fe820-185c-4a3a-9436-2493bb0a0cb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1036.494376] env[62619]: DEBUG oslo_vmware.api [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 
tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365367, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1036.823361] env[62619]: DEBUG oslo_concurrency.lockutils [req-1c18338e-fde7-4dbb-b9d2-0086ee791d87 req-019c162f-472c-45fa-abdb-c376f8e47ffd service nova] Releasing lock "refresh_cache-69e916ee-9edc-4e1b-85a0-40142364e3bb" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.852071] env[62619]: DEBUG oslo_vmware.api [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Task: {'id': task-1365366, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.15541} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.852345] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1036.852576] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1036.852792] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1036.853013] env[62619]: INFO nova.compute.manager [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1036.853322] env[62619]: DEBUG oslo.service.loopingcall [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1036.853555] env[62619]: DEBUG nova.compute.manager [-] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1036.853680] env[62619]: DEBUG nova.network.neutron [-] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1036.993180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-27280d82-22ce-4312-8ff2-216d4364a889" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1036.994574] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Instance network_info: |[{"id": "8d9fe820-185c-4a3a-9436-2493bb0a0cb3", "address": "fa:16:3e:60:9a:45", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d9fe820-18", "ovs_interfaceid": "8d9fe820-185c-4a3a-9436-2493bb0a0cb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1036.994574] env[62619]: DEBUG oslo_vmware.api [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365367, 'name': PowerOffVM_Task, 'duration_secs': 0.264015} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1036.994574] env[62619]: DEBUG oslo_concurrency.lockutils [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] Acquired lock "refresh_cache-27280d82-22ce-4312-8ff2-216d4364a889" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1036.994574] env[62619]: DEBUG nova.network.neutron [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Refreshing network info cache for port 8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1036.995846] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:60:9a:45', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8d9fe820-185c-4a3a-9436-2493bb0a0cb3', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1037.002903] env[62619]: DEBUG oslo.service.loopingcall [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.003134] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1037.003304] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1037.006201] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1037.006416] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0fd7e00c-dae8-48fa-8934-c6d16155c2f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.008215] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34100bc3-1115-4fb0-849f-145bab0746ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.028808] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1037.028808] env[62619]: value = "task-1365369" [ 1037.028808] env[62619]: _type = "Task" [ 1037.028808] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.036464] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365369, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.134057] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1037.134409] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1037.134694] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Deleting the datastore file [datastore1] c0cbe0ed-94e0-4d02-ae7a-2589938f4c41 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1037.135074] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7db5c08c-0171-4170-98ab-e1de01dddcc5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.142643] env[62619]: DEBUG oslo_vmware.api [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for the task: (returnval){ [ 1037.142643] env[62619]: value = "task-1365370" [ 1037.142643] env[62619]: _type = "Task" [ 1037.142643] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.150995] env[62619]: DEBUG oslo_vmware.api [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365370, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.433185] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1037.433469] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290556', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'name': 'volume-1192b48d-685e-4291-a41c-ab6536cdc58e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2', 'attached_at': '', 'detached_at': '', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'serial': '1192b48d-685e-4291-a41c-ab6536cdc58e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1037.434412] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-353f6301-70d7-4519-b86b-eade3a6cc443 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.449822] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3859a21d-f0da-43cf-9a90-e699c2375cf2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.473770] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] volume-1192b48d-685e-4291-a41c-ab6536cdc58e/volume-1192b48d-685e-4291-a41c-ab6536cdc58e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1037.474165] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7b8e2116-09fa-403a-ac80-cc3f2d754eb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.491905] env[62619]: DEBUG oslo_vmware.api [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1037.491905] env[62619]: value = "task-1365371" [ 1037.491905] env[62619]: _type = "Task" [ 1037.491905] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.503016] env[62619]: DEBUG oslo_vmware.api [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365371, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.541060] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365369, 'name': CreateVM_Task, 'duration_secs': 0.393146} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.541281] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1037.542036] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1037.542232] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1037.542599] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1037.542875] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc320bfe-0d43-4a5f-b805-c8acebf4ac43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1037.547531] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1037.547531] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520c0a81-14d5-1c72-75b2-27138a324634" [ 1037.547531] env[62619]: _type = "Task" [ 1037.547531] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1037.551425] env[62619]: DEBUG nova.network.neutron [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Updated VIF entry in instance network info cache for port 8d9fe820-185c-4a3a-9436-2493bb0a0cb3. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1037.551822] env[62619]: DEBUG nova.network.neutron [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Updating instance_info_cache with network_info: [{"id": "8d9fe820-185c-4a3a-9436-2493bb0a0cb3", "address": "fa:16:3e:60:9a:45", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8d9fe820-18", "ovs_interfaceid": "8d9fe820-185c-4a3a-9436-2493bb0a0cb3", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1037.557556] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520c0a81-14d5-1c72-75b2-27138a324634, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1037.652313] env[62619]: DEBUG oslo_vmware.api [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Task: {'id': task-1365370, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.143463} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1037.652574] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1037.652783] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1037.653044] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1037.653238] env[62619]: INFO nova.compute.manager [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1037.653479] env[62619]: DEBUG oslo.service.loopingcall [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1037.653665] env[62619]: DEBUG nova.compute.manager [-] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1037.653745] env[62619]: DEBUG nova.network.neutron [-] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1038.001739] env[62619]: DEBUG oslo_vmware.api [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365371, 'name': ReconfigVM_Task, 'duration_secs': 0.332297} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.002110] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfigured VM instance instance-0000005e to attach disk [datastore1] volume-1192b48d-685e-4291-a41c-ab6536cdc58e/volume-1192b48d-685e-4291-a41c-ab6536cdc58e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1038.006670] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-126783ff-6e66-4bab-bb26-55b1881189d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.021434] env[62619]: DEBUG oslo_vmware.api [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1038.021434] env[62619]: value = "task-1365372" [ 1038.021434] env[62619]: _type = "Task" [ 1038.021434] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.029524] env[62619]: DEBUG oslo_vmware.api [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365372, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.057078] env[62619]: DEBUG oslo_concurrency.lockutils [req-d13851dd-4100-4900-9fad-339932c101af req-b2e71b1e-5caa-44a2-a3a5-07cab1c005bd service nova] Releasing lock "refresh_cache-27280d82-22ce-4312-8ff2-216d4364a889" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.057516] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520c0a81-14d5-1c72-75b2-27138a324634, 'name': SearchDatastore_Task, 'duration_secs': 0.011749} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.057808] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1038.058061] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1038.058323] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1038.058486] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1038.058666] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1038.058930] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f6ed31cb-e9d4-4212-b58a-7866c63cf276 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.065924] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1038.066146] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1038.066853] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9b4a2f98-6c6d-44e8-940e-34ced186f1d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.071837] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1038.071837] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5246b4d0-e9bf-5759-d30f-4e5fc045ae65" [ 1038.071837] env[62619]: _type = "Task" [ 1038.071837] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.079783] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5246b4d0-e9bf-5759-d30f-4e5fc045ae65, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.131244] env[62619]: DEBUG nova.network.neutron [-] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.371298] env[62619]: DEBUG nova.network.neutron [-] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1038.397591] env[62619]: DEBUG nova.compute.manager [req-8e2a6c5b-fda9-4ff7-a4a2-adf432151b48 req-ec4bb71a-41dc-48d9-856d-c8f79c7a06e3 service nova] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Received event network-vif-deleted-b8ca3257-e811-48cd-ac4a-662b49bf41f3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.397820] env[62619]: DEBUG nova.compute.manager [req-8e2a6c5b-fda9-4ff7-a4a2-adf432151b48 req-ec4bb71a-41dc-48d9-856d-c8f79c7a06e3 service nova] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Received event network-vif-deleted-b991507b-e917-4e36-9058-bf35a5777706 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1038.531379] env[62619]: DEBUG oslo_vmware.api [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365372, 'name': ReconfigVM_Task, 'duration_secs': 0.205296} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.531710] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290556', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'name': 'volume-1192b48d-685e-4291-a41c-ab6536cdc58e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2', 'attached_at': '', 'detached_at': '', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'serial': '1192b48d-685e-4291-a41c-ab6536cdc58e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1038.583053] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5246b4d0-e9bf-5759-d30f-4e5fc045ae65, 'name': SearchDatastore_Task, 'duration_secs': 0.008141} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1038.583419] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-307dfe8c-2ef5-41ab-b455-08dc396cc92b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1038.588489] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1038.588489] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5215c2e2-bbc3-5466-0cdf-aa50a88f1bae" [ 1038.588489] env[62619]: _type = "Task" [ 1038.588489] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1038.597149] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5215c2e2-bbc3-5466-0cdf-aa50a88f1bae, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1038.634269] env[62619]: INFO nova.compute.manager [-] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Took 1.78 seconds to deallocate network for instance. [ 1038.874327] env[62619]: INFO nova.compute.manager [-] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Took 1.22 seconds to deallocate network for instance. [ 1039.100557] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5215c2e2-bbc3-5466-0cdf-aa50a88f1bae, 'name': SearchDatastore_Task, 'duration_secs': 0.008474} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1039.100975] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1039.101322] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 27280d82-22ce-4312-8ff2-216d4364a889/27280d82-22ce-4312-8ff2-216d4364a889.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1039.102068] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-72cbab81-c1b7-4a10-a761-cc28115b2d67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.109064] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1039.109064] env[62619]: value = "task-1365373" [ 1039.109064] env[62619]: _type = "Task" [ 1039.109064] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.118571] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365373, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.141285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.141666] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.141886] env[62619]: DEBUG nova.objects.instance [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lazy-loading 'resources' on Instance uuid 69e916ee-9edc-4e1b-85a0-40142364e3bb {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.323998] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.324177] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1039.381380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1039.574417] env[62619]: DEBUG nova.objects.instance [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lazy-loading 'flavor' on Instance uuid 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1039.618992] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365373, 'name': CopyVirtualDisk_Task} progress is 89%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1039.769223] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7e1acdde-3f19-4dff-8049-a14c57facfd3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.777017] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3e41db-2270-4684-a762-661423d99ff2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.808874] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e33efe-e540-4394-b117-a01fab6d56e5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.816539] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d10eeb-0f68-43aa-ad5f-dd50c56c3846 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.829804] env[62619]: INFO nova.compute.manager [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Detaching volume f9a35717-66fa-4d29-9e27-0e5ac0f81b7e [ 1039.831761] env[62619]: DEBUG nova.compute.provider_tree [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1039.863638] env[62619]: INFO nova.virt.block_device [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Attempting to driver detach volume f9a35717-66fa-4d29-9e27-0e5ac0f81b7e from mountpoint /dev/sdb [ 1039.863638] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1039.863863] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290542', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'name': 'volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74aa214a-7eda-4613-a394-bc7477d3078e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'serial': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1039.864831] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d6a10b4b-e5ce-4bc6-99d3-c71c0edaf7a1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.886509] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec4e5bc4-7937-425d-b7ea-259f91892b8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.893512] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2fc839f4-00f8-4c94-bc94-791ca4356d22 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.913691] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-391a9587-5819-4da1-aa6c-36b36fef5011 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.930116] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] The volume has not been displaced from its original location: [datastore1] volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e/volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1039.935388] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Reconfiguring VM instance instance-00000059 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1039.935748] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6d0d902-0895-40ec-b13a-6018afe7d954 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1039.953820] env[62619]: DEBUG oslo_vmware.api [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1039.953820] env[62619]: value = "task-1365374" [ 1039.953820] env[62619]: _type = "Task" [ 1039.953820] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1039.961768] env[62619]: DEBUG oslo_vmware.api [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365374, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.081551] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5901ccf1-dc27-44f4-99fa-739b46f2faf1 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.248s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.120387] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365373, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.544089} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.120666] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 27280d82-22ce-4312-8ff2-216d4364a889/27280d82-22ce-4312-8ff2-216d4364a889.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1040.120885] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1040.121162] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-67da84a1-fe31-4076-b955-3dfdfe6c3870 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.127783] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1040.127783] env[62619]: value = "task-1365375" [ 1040.127783] env[62619]: _type = "Task" [ 1040.127783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.135858] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365375, 'name': ExtendVirtualDisk_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.234930] env[62619]: INFO nova.compute.manager [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Rescuing [ 1040.235258] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1040.235412] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1040.235583] env[62619]: DEBUG nova.network.neutron [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1040.334775] env[62619]: DEBUG nova.scheduler.client.report [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1040.463731] env[62619]: DEBUG oslo_vmware.api [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365374, 'name': ReconfigVM_Task, 'duration_secs': 0.461143} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.464123] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Reconfigured VM instance instance-00000059 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1040.468597] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-9b47c567-0195-4b65-8989-fd1797d8803c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.483502] env[62619]: DEBUG oslo_vmware.api [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1040.483502] env[62619]: value = "task-1365376" [ 1040.483502] env[62619]: _type = "Task" [ 1040.483502] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.491196] env[62619]: DEBUG oslo_vmware.api [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365376, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.637460] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365375, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.204293} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.637688] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1040.638466] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbb56106-3924-47bb-93d8-b64c25b059da {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.660095] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Reconfiguring VM instance instance-00000067 to attach disk [datastore1] 27280d82-22ce-4312-8ff2-216d4364a889/27280d82-22ce-4312-8ff2-216d4364a889.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1040.660386] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5b8c1472-a2a9-4ca3-92a1-b74aa83fd4b8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1040.679850] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1040.679850] env[62619]: value = "task-1365377" [ 1040.679850] env[62619]: _type = "Task" [ 1040.679850] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1040.688762] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365377, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1040.840690] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.699s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1040.842861] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 1.462s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1040.843139] env[62619]: DEBUG nova.objects.instance [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lazy-loading 'resources' on Instance uuid c0cbe0ed-94e0-4d02-ae7a-2589938f4c41 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1040.868680] env[62619]: INFO nova.scheduler.client.report [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Deleted allocations for instance 69e916ee-9edc-4e1b-85a0-40142364e3bb [ 1040.973355] env[62619]: DEBUG nova.network.neutron [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1040.994528] env[62619]: DEBUG oslo_vmware.api [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365376, 'name': ReconfigVM_Task, 
'duration_secs': 0.123724} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1040.994805] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290542', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'name': 'volume-f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '74aa214a-7eda-4613-a394-bc7477d3078e', 'attached_at': '', 'detached_at': '', 'volume_id': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e', 'serial': 'f9a35717-66fa-4d29-9e27-0e5ac0f81b7e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1041.190170] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365377, 'name': ReconfigVM_Task, 'duration_secs': 0.25536} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.190515] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Reconfigured VM instance instance-00000067 to attach disk [datastore1] 27280d82-22ce-4312-8ff2-216d4364a889/27280d82-22ce-4312-8ff2-216d4364a889.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1041.191146] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-0e5c5db2-5318-447c-ad5b-2785ed48448a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.197318] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1041.197318] env[62619]: value = "task-1365378" [ 1041.197318] env[62619]: _type = "Task" [ 1041.197318] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.204459] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365378, 'name': Rename_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1041.376355] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b9157f02-571e-403a-890e-4847273d35ca tempest-AttachInterfacesUnderV243Test-161764308 tempest-AttachInterfacesUnderV243Test-161764308-project-member] Lock "69e916ee-9edc-4e1b-85a0-40142364e3bb" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.659s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1041.443891] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4115c10a-f699-4c94-acdf-fbf6c0a9961b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.451613] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4005144-b278-48b6-8393-05d0cd1f5a93 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.482109] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1041.485014] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4649da22-2855-4c84-b45a-f7a6607eae55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.492478] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53cd6f8d-5976-4091-ad6e-e9bbd4e0fee1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.505530] env[62619]: DEBUG nova.compute.provider_tree [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1041.536081] env[62619]: DEBUG nova.objects.instance [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1041.708264] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365378, 'name': Rename_Task, 'duration_secs': 0.121801} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1041.708544] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1041.708781] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-16889811-fbaf-4f93-9e13-2539cd083d39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1041.715009] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1041.715009] env[62619]: value = "task-1365379" [ 1041.715009] env[62619]: _type = "Task" [ 1041.715009] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1041.727704] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365379, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.009551] env[62619]: DEBUG nova.scheduler.client.report [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1042.018316] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.018585] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-2602e370-49c5-498c-a8fa-ceb0b541c59d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.026722] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1042.026722] env[62619]: value = "task-1365380" [ 1042.026722] env[62619]: _type = "Task" [ 1042.026722] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.035226] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365380, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.225867] env[62619]: DEBUG oslo_vmware.api [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365379, 'name': PowerOnVM_Task, 'duration_secs': 0.44343} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.226209] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1042.226454] env[62619]: INFO nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Took 7.21 seconds to spawn the instance on the hypervisor. [ 1042.226665] env[62619]: DEBUG nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.227505] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62dbd589-ae44-4393-98d3-ce431c0595a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.507090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1042.514269] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.536564] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365380, 'name': PowerOffVM_Task, 'duration_secs': 0.185774} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1042.538039] env[62619]: INFO nova.scheduler.client.report [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Deleted allocations for instance c0cbe0ed-94e0-4d02-ae7a-2589938f4c41 [ 1042.539127] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1042.541970] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3182ebd9-0bbe-4bb4-ab6d-15378dccabb5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.545299] env[62619]: DEBUG oslo_concurrency.lockutils [None req-240d9dcf-49f8-4348-bc55-4f27e8d12a87 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1042.546213] env[62619]: DEBUG oslo_concurrency.lockutils [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.039s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1042.546369] env[62619]: DEBUG nova.compute.manager [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1042.547636] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7db1b5c5-a8e6-45d0-9177-2e6cd3229f8d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.567268] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd2917f3-5419-4824-a0c6-c6ea864234a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.571734] env[62619]: DEBUG nova.compute.manager [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1042.572313] env[62619]: DEBUG nova.objects.instance [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 
74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1042.606858] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1042.606858] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-392c204b-f9a4-45f7-9202-b232d37e756c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.614172] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1042.614172] env[62619]: value = "task-1365381" [ 1042.614172] env[62619]: _type = "Task" [ 1042.614172] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.623990] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] VM already powered off {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1519}} [ 1042.624241] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1042.624480] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1042.624630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1042.624806] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1042.625104] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ff57f0d9-e653-4bfd-8b73-b9e6c5cbda1a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1042.634722] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1042.634992] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1042.636630] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c303b1ee-3d01-48be-94db-f7894d94be97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1042.643374] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1042.643374] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52195b62-78e1-9e54-2210-37db4ac627d8" [ 1042.643374] env[62619]: _type = "Task" [ 1042.643374] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1042.651239] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52195b62-78e1-9e54-2210-37db4ac627d8, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1042.744596] env[62619]: INFO nova.compute.manager [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Took 12.01 seconds to build instance. 
[ 1043.049166] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3175fe09-f740-4ade-9a36-b2495bb0b78f tempest-ServerAddressesNegativeTestJSON-1205596538 tempest-ServerAddressesNegativeTestJSON-1205596538-project-member] Lock "c0cbe0ed-94e0-4d02-ae7a-2589938f4c41" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.590s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.077168] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.077446] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-48515c04-dd85-4992-8af8-17651f29a879 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.085029] env[62619]: DEBUG oslo_vmware.api [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1043.085029] env[62619]: value = "task-1365382" [ 1043.085029] env[62619]: _type = "Task" [ 1043.085029] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.095265] env[62619]: DEBUG oslo_vmware.api [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365382, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.153778] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52195b62-78e1-9e54-2210-37db4ac627d8, 'name': SearchDatastore_Task, 'duration_secs': 0.009406} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.154776] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4cae7884-db7b-4bbc-af93-a0ae62b4bde9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.160524] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1043.160524] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520b8155-441c-e9c0-fb94-6812f59d3d0b" [ 1043.160524] env[62619]: _type = "Task" [ 1043.160524] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.169648] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520b8155-441c-e9c0-fb94-6812f59d3d0b, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.247286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-a80f48dd-fb46-4998-b82f-5a824d980dab tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "27280d82-22ce-4312-8ff2-216d4364a889" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.523s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.325779] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "27280d82-22ce-4312-8ff2-216d4364a889" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.326372] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "27280d82-22ce-4312-8ff2-216d4364a889" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.326630] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "27280d82-22ce-4312-8ff2-216d4364a889-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1043.326868] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "27280d82-22ce-4312-8ff2-216d4364a889-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1043.327064] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "27280d82-22ce-4312-8ff2-216d4364a889-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1043.329733] env[62619]: INFO nova.compute.manager [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Terminating instance [ 1043.332378] env[62619]: DEBUG 
nova.compute.manager [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1043.332668] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1043.334158] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fbe29c59-b7b7-4ca0-9b65-667a70ecda4f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.345899] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1043.346496] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c6675e6a-2a27-4c2c-bba7-8fd9065a169b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.352868] env[62619]: DEBUG oslo_vmware.api [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1043.352868] env[62619]: value = "task-1365383" [ 1043.352868] env[62619]: _type = "Task" [ 1043.352868] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.360701] env[62619]: DEBUG oslo_vmware.api [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365383, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.594905] env[62619]: DEBUG oslo_vmware.api [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365382, 'name': PowerOffVM_Task, 'duration_secs': 0.283951} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.595253] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.595507] env[62619]: DEBUG nova.compute.manager [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1043.596351] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff373a98-dd28-4bef-a6af-457b76946a39 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.671882] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520b8155-441c-e9c0-fb94-6812f59d3d0b, 'name': SearchDatastore_Task, 'duration_secs': 0.017531} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.672180] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1043.672437] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copying virtual disk from [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. {{(pid=62619) disk_copy /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:281}} [ 1043.672772] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-91bba9fb-07f5-4eba-9ca5-e5ce4120987a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.680152] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1043.680152] env[62619]: value = "task-1365384" [ 1043.680152] env[62619]: _type = "Task" [ 1043.680152] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.687918] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365384, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1043.865677] env[62619]: DEBUG oslo_vmware.api [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365383, 'name': PowerOffVM_Task, 'duration_secs': 0.246084} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1043.866321] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1043.866620] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1043.866985] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7bd36f4f-cfa4-4405-8033-c7f0d22c57d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.937026] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1043.937026] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1043.937026] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore1] 27280d82-22ce-4312-8ff2-216d4364a889 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1043.937026] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7f7df2b5-bde4-4a31-b098-1c5df7934a41 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1043.944552] env[62619]: DEBUG oslo_vmware.api [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1043.944552] env[62619]: value = "task-1365386" [ 1043.944552] env[62619]: _type = "Task" [ 1043.944552] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1043.953691] env[62619]: DEBUG oslo_vmware.api [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365386, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.108737] env[62619]: DEBUG oslo_concurrency.lockutils [None req-acb41f65-79d0-4523-b968-58f5f936242a tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.562s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1044.194038] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365384, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.493566} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.194038] env[62619]: INFO nova.virt.vmwareapi.ds_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Copied virtual disk from [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk. [ 1044.194038] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac472346-f29e-4987-b402-6cefedbd42ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.219987] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfiguring VM instance instance-0000005e to attach disk [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1044.220482] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b8a778e3-4586-4be1-869d-54864d5242d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.238185] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1044.238185] env[62619]: value = "task-1365387" [ 1044.238185] env[62619]: _type = "Task" [ 1044.238185] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.247177] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365387, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.457441] env[62619]: DEBUG oslo_vmware.api [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365386, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.347448} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.457947] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1044.458294] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1044.458573] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1044.458840] env[62619]: INFO nova.compute.manager [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Took 1.13 seconds to destroy the instance on the hypervisor. [ 1044.459242] env[62619]: DEBUG oslo.service.loopingcall [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1044.459585] env[62619]: DEBUG nova.compute.manager [-] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1044.459789] env[62619]: DEBUG nova.network.neutron [-] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1044.748332] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365387, 'name': ReconfigVM_Task, 'duration_secs': 0.338486} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1044.748681] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfigured VM instance instance-0000005e to attach disk [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2/a3c7be48-0721-419b-bbd6-8b4cc36c5604-rescue.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1044.749640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e57b6af-4bef-4afb-9522-acdc52bb0ac2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.778669] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-86d4ff42-c879-4713-aa29-b4d1a0be6187 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1044.795631] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1044.795631] env[62619]: value = "task-1365388" [ 1044.795631] env[62619]: _type = "Task" [ 1044.795631] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1044.803680] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365388, 'name': ReconfigVM_Task} progress is 6%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1044.855824] env[62619]: DEBUG nova.compute.manager [req-096ac046-9cef-4459-8b56-db200d8ba406 req-b4485879-3760-487b-997b-2bb803f27770 service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Received event network-vif-deleted-8d9fe820-185c-4a3a-9436-2493bb0a0cb3 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1044.856057] env[62619]: INFO nova.compute.manager [req-096ac046-9cef-4459-8b56-db200d8ba406 req-b4485879-3760-487b-997b-2bb803f27770 service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Neutron deleted interface 8d9fe820-185c-4a3a-9436-2493bb0a0cb3; detaching it from the instance and deleting it from the info cache [ 1044.856562] env[62619]: DEBUG nova.network.neutron [req-096ac046-9cef-4459-8b56-db200d8ba406 req-b4485879-3760-487b-997b-2bb803f27770 service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.061585] env[62619]: DEBUG nova.objects.instance [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.305880] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365388, 'name': ReconfigVM_Task, 'duration_secs': 0.468837} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1045.306186] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1045.306470] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-9a62b88f-f519-4026-b6b9-de256699bd01 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.312464] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1045.312464] env[62619]: value = "task-1365389" [ 1045.312464] env[62619]: _type = "Task" [ 1045.312464] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1045.320017] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365389, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.331106] env[62619]: DEBUG nova.network.neutron [-] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1045.359863] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1beaf74-1ed4-476b-9441-d5f2c5aec8ea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.369147] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2995f4fe-12f1-411d-8df7-2c1e35ec4c61 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1045.395424] env[62619]: DEBUG nova.compute.manager [req-096ac046-9cef-4459-8b56-db200d8ba406 req-b4485879-3760-487b-997b-2bb803f27770 service nova] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Detach interface failed, port_id=8d9fe820-185c-4a3a-9436-2493bb0a0cb3, reason: Instance 27280d82-22ce-4312-8ff2-216d4364a889 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1045.567070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1045.567281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1045.567383] env[62619]: DEBUG nova.network.neutron [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1045.569032] env[62619]: DEBUG nova.objects.instance [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'info_cache' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1045.822477] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365389, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1045.833057] env[62619]: INFO nova.compute.manager [-] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Took 1.37 seconds to deallocate network for instance. 
[ 1046.071932] env[62619]: DEBUG nova.objects.base [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Object Instance<74aa214a-7eda-4613-a394-bc7477d3078e> lazy-loaded attributes: flavor,info_cache {{(pid=62619) wrapper /opt/stack/nova/nova/objects/base.py:126}} [ 1046.323289] env[62619]: DEBUG oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365389, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1046.340770] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1046.340963] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1046.341252] env[62619]: DEBUG nova.objects.instance [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid 27280d82-22ce-4312-8ff2-216d4364a889 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1046.801402] env[62619]: DEBUG nova.network.neutron [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [{"id": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "address": "fa:16:3e:2b:fd:42", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.202", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapec268e92-8f", "ovs_interfaceid": "ec268e92-8fac-43c7-b39c-0518ca9a0403", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1046.823839] env[62619]: DEBUG 
oslo_vmware.api [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365389, 'name': PowerOnVM_Task, 'duration_secs': 1.404956} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1046.824260] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1046.826794] env[62619]: DEBUG nova.compute.manager [None req-8e3a96a7-a0a5-48da-8387-8ca78e319bf0 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1046.828171] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25b466a7-4467-4f3f-82ea-3bba3105e732 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.949294] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e342779c-b750-4c3a-990c-8585d9f307b7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.957175] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f3121f0f-97ec-4b50-bcb6-9872594f74a3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.987275] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb64a6a6-566a-4220-855d-f901dbf04aeb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1046.994114] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e305d737-818f-4999-8a97-0bb7284337a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.008397] env[62619]: DEBUG nova.compute.provider_tree [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1047.305258] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "refresh_cache-74aa214a-7eda-4613-a394-bc7477d3078e" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1047.511490] env[62619]: DEBUG nova.scheduler.client.report [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1047.809502] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1047.809806] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-ba2911d2-4e9a-435c-8071-a18ea92f3cee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1047.817457] env[62619]: DEBUG oslo_vmware.api [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1047.817457] env[62619]: value = "task-1365390" [ 1047.817457] env[62619]: _type = "Task" [ 1047.817457] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1047.824523] env[62619]: DEBUG oslo_vmware.api [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365390, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1048.017522] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.676s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.035306] env[62619]: INFO nova.scheduler.client.report [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance 27280d82-22ce-4312-8ff2-216d4364a889 [ 1048.329167] env[62619]: DEBUG oslo_vmware.api [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365390, 'name': PowerOnVM_Task, 'duration_secs': 0.358138} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1048.329366] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1048.329562] env[62619]: DEBUG nova.compute.manager [None req-d526805a-0285-4cf4-8ebf-e96d4ff5177f tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1048.330343] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1cbe376-6cea-4697-a866-659adc5e4f44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1048.542285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3bf71b45-1104-4327-bd64-5b0e0d9538a6 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "27280d82-22ce-4312-8ff2-216d4364a889" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.216s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1048.767370] env[62619]: INFO nova.compute.manager [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Unrescuing [ 1048.767647] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1048.767802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1048.767973] env[62619]: DEBUG nova.network.neutron [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1049.201025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1049.201025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 
tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1049.482752] env[62619]: DEBUG nova.network.neutron [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1049.703926] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1049.985542] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1049.986359] env[62619]: DEBUG nova.objects.instance [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lazy-loading 'flavor' on Instance uuid 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1050.226515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1050.226842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1050.228493] env[62619]: INFO nova.compute.claims [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1050.493070] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5be540cc-0484-4af3-b79a-f03ed166470a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.516716] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1050.517071] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-79ba5bd7-f4b4-4504-bc46-c5d7c67a9c9c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1050.523739] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1050.523739] env[62619]: value = "task-1365391" [ 1050.523739] env[62619]: _type = "Task" [ 1050.523739] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1050.532101] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365391, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.033448] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365391, 'name': PowerOffVM_Task, 'duration_secs': 0.211439} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1051.033727] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1051.038909] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfiguring VM instance instance-0000005e to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1051.039203] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3aa11c3f-35dd-419d-93e8-33a1a2e94407 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.057176] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1051.057176] env[62619]: value = "task-1365392" [ 1051.057176] env[62619]: _type = "Task" [ 1051.057176] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1051.065218] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365392, 'name': ReconfigVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.318386] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2fe2aed-4d8a-411c-8246-960dd189164e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.325540] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e6f9ccc-8013-4f24-a94d-7aed68b158bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.356027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91bcc34-e2bc-4dc2-a7dd-45ad81ed1107 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.363046] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8d864c2c-b1e3-4055-bb5e-8d810b22e3b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1051.375849] env[62619]: DEBUG nova.compute.provider_tree [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1051.566917] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365392, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1051.629755] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.629978] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1051.630142] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1051.879011] env[62619]: DEBUG nova.scheduler.client.report [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1052.067955] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365392, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.161341] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1052.161341] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquired lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1052.161502] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1052.383810] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.157s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1052.384452] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1052.568950] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365392, 'name': ReconfigVM_Task, 'duration_secs': 1.2499} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1052.569257] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfigured VM instance instance-0000005e to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1052.569453] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1052.569715] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-c5301562-753c-4bdf-9c7c-b5d23ce89f5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1052.575914] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1052.575914] env[62619]: value = "task-1365393" [ 1052.575914] env[62619]: _type = "Task" [ 1052.575914] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1052.584567] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365393, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1052.889805] env[62619]: DEBUG nova.compute.utils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1052.891669] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1052.891830] env[62619]: DEBUG nova.network.neutron [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1052.929913] env[62619]: DEBUG nova.policy [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ee3581d242034a4c8b00417545bd52ed', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c61b5e689e5544e6857baf8d3c52fe0b', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1053.086197] env[62619]: DEBUG oslo_vmware.api [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365393, 'name': PowerOnVM_Task, 'duration_secs': 0.340224} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1053.086449] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1053.086689] env[62619]: DEBUG nova.compute.manager [None req-b64a8ae5-c5e4-42dc-9426-e990c7225e9b tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1053.087453] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9141f99-2e51-4dfb-9f98-821685df644c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.162622] env[62619]: DEBUG nova.network.neutron [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Successfully created port: 02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1053.394694] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1053.456706] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updating instance_info_cache with network_info: [{"id": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "address": "fa:16:3e:e7:7e:54", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.7", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb2d0f2bc-db", "ovs_interfaceid": "b2d0f2bc-db7c-485c-8126-98219c38a4fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1053.958931] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Releasing lock "refresh_cache-9f7d7830-b878-41b9-a236-f7cd5580cf1d" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1053.959168] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1053.959366] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.959530] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.959674] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.959819] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.959960] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.960125] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.960256] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1053.960404] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.350741] env[62619]: DEBUG nova.compute.manager [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1054.351085] env[62619]: DEBUG nova.compute.manager [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing instance network info cache due to event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1054.351351] env[62619]: DEBUG oslo_concurrency.lockutils [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1054.351506] env[62619]: DEBUG oslo_concurrency.lockutils [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1054.351671] env[62619]: DEBUG nova.network.neutron [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1054.404813] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1054.434489] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1054.434745] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1054.434904] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1054.435359] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1054.435359] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1054.435511] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1054.436596] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1054.436596] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1054.436596] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 
tempest-ServersTestJSON-203516608-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1054.436596] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1054.436596] env[62619]: DEBUG nova.virt.hardware [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1054.437118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c96e3c9-4b84-428c-be8c-1c063dd541a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.445674] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84591b0e-0f41-48a9-93ca-6c8510395842 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.462932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.463224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.463344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1054.463497] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1054.464568] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cca88aae-7f60-43a4-a0cb-81a350be646c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.474853] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddeba5a0-2b77-4092-af31-3912f2224e97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.493594] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2d4752c-2c54-450b-be4f-0a694df31cc5 {{(pid=62619) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.500246] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afedd37f-0509-459f-b4bb-bcf541f14be1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.528418] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180575MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1054.528578] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1054.528775] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1054.742506] env[62619]: DEBUG nova.network.neutron [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Successfully updated port: 02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1055.065627] env[62619]: DEBUG nova.network.neutron [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updated VIF entry in instance network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1055.065997] env[62619]: DEBUG nova.network.neutron [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.246147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "refresh_cache-7dd76e44-c5d3-4178-8942-6eb6e01edbcd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1055.246147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "refresh_cache-7dd76e44-c5d3-4178-8942-6eb6e01edbcd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1055.246147] env[62619]: DEBUG nova.network.neutron [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1055.557863] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 9f7d7830-b878-41b9-a236-f7cd5580cf1d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558136] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 02dec4f2-cbe7-4bb0-a57e-3970c5669354 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558136] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558262] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 74aa214a-7eda-4613-a394-bc7477d3078e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558447] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558580] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 460c12f6-5eb8-427c-bda4-6773e1bc9034 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558696] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 7dd76e44-c5d3-4178-8942-6eb6e01edbcd actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.558879] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1055.559026] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1856MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1055.568779] env[62619]: DEBUG oslo_concurrency.lockutils [req-af1eb6b6-98da-4bbb-bf32-88ea9c7a9dd7 req-b665cc71-6a10-4d45-b037-95267e20aac6 service nova] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1055.645250] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81b5304d-f3dd-44f7-864f-4e2202f08649 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.653091] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa945cd6-bb0e-4b68-855f-6278307f5976 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.683277] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41980c3b-3640-47bc-9075-d5e130e3d341 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.690111] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5a3b668-9939-469b-a0d3-84522528bb4b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.702607] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1055.777230] env[62619]: DEBUG nova.network.neutron [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1055.901635] env[62619]: DEBUG nova.network.neutron [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Updating instance_info_cache with network_info: [{"id": "02100821-c331-4ac6-897d-5dab9c259eca", "address": "fa:16:3e:01:87:f8", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02100821-c3", "ovs_interfaceid": "02100821-c331-4ac6-897d-5dab9c259eca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.221697] env[62619]: ERROR nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [req-510a987e-841d-4874-86ef-f8775a39bcfa] Failed to update inventory to [{'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}}] for resource provider with UUID c1b543f3-8b72-4e01-a5a8-30dc9ed76c83. 
Got 409: {"errors": [{"status": 409, "title": "Conflict", "detail": "There was a conflict when trying to complete your request.\n\n resource provider generation conflict ", "code": "placement.concurrent_update", "request_id": "req-510a987e-841d-4874-86ef-f8775a39bcfa"}]} [ 1056.238695] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Refreshing inventories for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:818}} [ 1056.251407] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating ProviderTree inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:782}} [ 1056.251595] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1056.261749] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Refreshing aggregate associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, aggregates: None {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:827}} [ 1056.280183] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Refreshing trait associations for resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83, traits: COMPUTE_NODE,COMPUTE_NET_ATTACH_INTERFACE,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_IMAGE_TYPE_ISO {{(pid=62619) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:839}} [ 1056.379197] env[62619]: DEBUG nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1056.379405] env[62619]: DEBUG nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing instance network info cache due to event network-changed-7c383235-8fc6-41ec-b559-12d1ff143a09. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1056.379631] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Acquiring lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.379776] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Acquired lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.379938] env[62619]: DEBUG nova.network.neutron [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Refreshing network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1056.403788] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "refresh_cache-7dd76e44-c5d3-4178-8942-6eb6e01edbcd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1056.404129] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Instance network_info: |[{"id": "02100821-c331-4ac6-897d-5dab9c259eca", "address": "fa:16:3e:01:87:f8", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02100821-c3", "ovs_interfaceid": "02100821-c331-4ac6-897d-5dab9c259eca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1056.404522] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:01:87:f8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '6b5a629f-6902-4d30-9278-74b443a8371d', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '02100821-c331-4ac6-897d-5dab9c259eca', 'vif_model': 'vmxnet3'}] 
{{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1056.411854] env[62619]: DEBUG oslo.service.loopingcall [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1056.414517] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1056.414922] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1767556d-5856-43f9-a8e4-949841183d02 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.437198] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1056.437198] env[62619]: value = "task-1365394" [ 1056.437198] env[62619]: _type = "Task" [ 1056.437198] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.447379] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365394, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1056.452336] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76b5168b-795e-4002-b4de-9ac686468ac1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.459530] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a97f414-9f50-4aab-a0e6-45be0e7848db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.490637] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b82dda9b-dccc-4e9f-9b50-f5580503333c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.498454] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52b32232-06da-487e-bf14-40542ad82daf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.512745] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1056.949017] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365394, 'name': CreateVM_Task, 'duration_secs': 0.364563} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1056.949359] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1056.949853] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1056.950034] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1056.950376] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1056.950635] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-38b2c14c-cd6e-415e-b62d-35cda60b477d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.955367] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1056.955367] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5244dd73-954a-4f33-9cdf-c2771d362985" [ 1056.955367] env[62619]: _type = "Task" [ 1056.955367] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1056.962868] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5244dd73-954a-4f33-9cdf-c2771d362985, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.041564] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 134 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1057.041781] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 134 to 135 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1057.041932] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1057.090220] env[62619]: DEBUG nova.network.neutron [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updated VIF entry in instance network info cache for port 7c383235-8fc6-41ec-b559-12d1ff143a09. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1057.090603] env[62619]: DEBUG nova.network.neutron [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [{"id": "7c383235-8fc6-41ec-b559-12d1ff143a09", "address": "fa:16:3e:08:41:51", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.212", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7c383235-8f", "ovs_interfaceid": "7c383235-8fc6-41ec-b559-12d1ff143a09", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1057.466802] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5244dd73-954a-4f33-9cdf-c2771d362985, 'name': SearchDatastore_Task, 'duration_secs': 0.008781} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.466802] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.466802] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1057.467022] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.467066] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.467268] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1057.467524] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c3e71339-6621-4edf-96c1-9999289b5f0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.475392] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1057.475571] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1057.476261] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4d1fdd61-7477-4c7e-8f29-0415d85c723a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.480960] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1057.480960] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5244ad20-1d4e-c3e1-fd07-c0bc017de7d9" [ 1057.480960] env[62619]: _type = "Task" [ 1057.480960] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1057.488143] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5244ad20-1d4e-c3e1-fd07-c0bc017de7d9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1057.550080] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1057.550285] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 3.021s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.593232] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Releasing lock "refresh_cache-52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1057.593510] env[62619]: DEBUG nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Received event network-vif-plugged-02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1057.593719] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Acquiring lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1057.593933] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1057.594129] env[62619]: DEBUG oslo_concurrency.lockutils 
[req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1057.594309] env[62619]: DEBUG nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] No waiting events found dispatching network-vif-plugged-02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1057.594492] env[62619]: WARNING nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Received unexpected event network-vif-plugged-02100821-c331-4ac6-897d-5dab9c259eca for instance with vm_state building and task_state spawning. [ 1057.594659] env[62619]: DEBUG nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Received event network-changed-02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1057.594821] env[62619]: DEBUG nova.compute.manager [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Refreshing instance network info cache due to event network-changed-02100821-c331-4ac6-897d-5dab9c259eca. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1057.595021] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Acquiring lock "refresh_cache-7dd76e44-c5d3-4178-8942-6eb6e01edbcd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1057.595190] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Acquired lock "refresh_cache-7dd76e44-c5d3-4178-8942-6eb6e01edbcd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1057.595387] env[62619]: DEBUG nova.network.neutron [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Refreshing network info cache for port 02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1057.991401] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5244ad20-1d4e-c3e1-fd07-c0bc017de7d9, 'name': SearchDatastore_Task, 'duration_secs': 0.007964} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1057.992172] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a1f96cbf-a90f-4508-86b1-f68799e73fef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.996992] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1057.996992] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526e052a-55ca-2607-b7f4-cea9a1ffa014" [ 1057.996992] env[62619]: _type = "Task" [ 1057.996992] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.004634] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526e052a-55ca-2607-b7f4-cea9a1ffa014, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.287077] env[62619]: DEBUG nova.network.neutron [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Updated VIF entry in instance network info cache for port 02100821-c331-4ac6-897d-5dab9c259eca. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1058.287514] env[62619]: DEBUG nova.network.neutron [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Updating instance_info_cache with network_info: [{"id": "02100821-c331-4ac6-897d-5dab9c259eca", "address": "fa:16:3e:01:87:f8", "network": {"id": "b5391776-4bcb-48d8-9215-2597025c3489", "bridge": "br-int", "label": "tempest-ServersTestJSON-997854958-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c61b5e689e5544e6857baf8d3c52fe0b", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "6b5a629f-6902-4d30-9278-74b443a8371d", "external-id": "nsx-vlan-transportzone-185", "segmentation_id": 185, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap02100821-c3", "ovs_interfaceid": "02100821-c331-4ac6-897d-5dab9c259eca", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.507804] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]526e052a-55ca-2607-b7f4-cea9a1ffa014, 'name': SearchDatastore_Task, 'duration_secs': 0.008914} 
completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1058.508040] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1058.508351] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 7dd76e44-c5d3-4178-8942-6eb6e01edbcd/7dd76e44-c5d3-4178-8942-6eb6e01edbcd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1058.508614] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e0349ccf-c8fc-4010-b733-e0c8506b71fa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.516766] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1058.516766] env[62619]: value = "task-1365395" [ 1058.516766] env[62619]: _type = "Task" [ 1058.516766] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.523600] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365395, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1058.790535] env[62619]: DEBUG oslo_concurrency.lockutils [req-1a251f70-354b-4455-bd68-acff75877306 req-a752c612-cee5-444c-855d-184d62b5d6c6 service nova] Releasing lock "refresh_cache-7dd76e44-c5d3-4178-8942-6eb6e01edbcd" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1059.026265] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365395, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.438265} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.026599] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 7dd76e44-c5d3-4178-8942-6eb6e01edbcd/7dd76e44-c5d3-4178-8942-6eb6e01edbcd.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1059.026728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1059.026976] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-f0c30832-ce04-4f88-812d-fda78c9bc3d3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.033495] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1059.033495] env[62619]: value = "task-1365396" [ 1059.033495] env[62619]: _type = "Task" [ 1059.033495] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.040415] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365396, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.543033] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365396, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.062339} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.543327] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1059.544122] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3550b3-a0e8-42bb-b540-5521134b3190 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.565244] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Reconfiguring VM instance instance-00000068 to attach disk [datastore1] 7dd76e44-c5d3-4178-8942-6eb6e01edbcd/7dd76e44-c5d3-4178-8942-6eb6e01edbcd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1059.565495] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-afd984bd-5b65-44c5-8b8b-1af7ba557e0a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.584399] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1059.584399] env[62619]: value = "task-1365397" [ 1059.584399] env[62619]: _type = "Task" [ 1059.584399] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.591800] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365397, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.094209] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365397, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1060.594921] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365397, 'name': ReconfigVM_Task, 'duration_secs': 0.914148} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1060.596031] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Reconfigured VM instance instance-00000068 to attach disk [datastore1] 7dd76e44-c5d3-4178-8942-6eb6e01edbcd/7dd76e44-c5d3-4178-8942-6eb6e01edbcd.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1060.596166] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-51b9b788-f4ae-43c6-a3ef-44095bcaf4e7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1060.602411] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1060.602411] env[62619]: value = "task-1365398" [ 1060.602411] env[62619]: _type = "Task" [ 1060.602411] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1060.611096] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365398, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.111825] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365398, 'name': Rename_Task, 'duration_secs': 0.141223} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1061.112316] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1061.112316] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-15198f05-7001-4dd8-9dda-14ec23c86afa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1061.118218] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1061.118218] env[62619]: value = "task-1365399" [ 1061.118218] env[62619]: _type = "Task" [ 1061.118218] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1061.127685] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365399, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1061.628457] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365399, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1062.128605] env[62619]: DEBUG oslo_vmware.api [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365399, 'name': PowerOnVM_Task, 'duration_secs': 0.549435} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1062.129107] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1062.129226] env[62619]: INFO nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Took 7.72 seconds to spawn the instance on the hypervisor. [ 1062.129436] env[62619]: DEBUG nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1062.130209] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fc98388a-ef36-486b-bc75-056a58cc29b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1062.649518] env[62619]: INFO nova.compute.manager [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Took 12.44 seconds to build instance. 
[ 1063.151430] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b4da4f0a-a2a2-42cf-9727-d88c6f379432 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.951s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1063.595325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1063.595646] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" acquired by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1063.595835] env[62619]: DEBUG nova.compute.manager [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1063.596808] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de45d62e-43d0-4b33-a9e0-ad4939175f7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1063.603710] env[62619]: DEBUG nova.compute.manager [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Stopping instance; current vm_state: active, current task_state: powering-off, current DB power_state: 1, current VM power_state: 1 {{(pid=62619) do_stop_instance /opt/stack/nova/nova/compute/manager.py:3368}} [ 1063.604288] env[62619]: DEBUG nova.objects.instance [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'flavor' on Instance uuid 7dd76e44-c5d3-4178-8942-6eb6e01edbcd {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1064.109990] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1064.110312] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-ca27c103-5536-444c-acd3-0206e1d67219 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1064.118366] env[62619]: DEBUG oslo_vmware.api [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 
1064.118366] env[62619]: value = "task-1365400" [ 1064.118366] env[62619]: _type = "Task" [ 1064.118366] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1064.127147] env[62619]: DEBUG oslo_vmware.api [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365400, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1064.628511] env[62619]: DEBUG oslo_vmware.api [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365400, 'name': PowerOffVM_Task, 'duration_secs': 0.184873} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1064.628876] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1064.628950] env[62619]: DEBUG nova.compute.manager [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1064.629707] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71915906-6ee1-48f8-a314-8815128e8e57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1065.140345] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5fb02e00-1749-434f-b6da-8f228327f356 tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" "released" by "nova.compute.manager.ComputeManager.stop_instance..do_stop_instance" :: held 1.545s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.199651] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.200032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.200611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd-events" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1066.200849] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1066.201073] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1066.203180] env[62619]: INFO nova.compute.manager [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Terminating instance [ 1066.205036] env[62619]: DEBUG nova.compute.manager [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1066.205246] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1066.206085] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-107d1807-50b4-4c98-8768-dbf640bdb347 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.213737] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1066.214080] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5db9878d-c3d4-4295-aaee-778b74476197 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.275760] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1066.275929] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Deleting contents of the VM from datastore 
datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1066.276160] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore1] 7dd76e44-c5d3-4178-8942-6eb6e01edbcd {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1066.276454] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-53ef57d9-ab51-4b5f-b62e-e6fcc81184c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1066.283282] env[62619]: DEBUG oslo_vmware.api [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1066.283282] env[62619]: value = "task-1365402" [ 1066.283282] env[62619]: _type = "Task" [ 1066.283282] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1066.291101] env[62619]: DEBUG oslo_vmware.api [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365402, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1066.793050] env[62619]: DEBUG oslo_vmware.api [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365402, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.137731} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1066.793269] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1066.793458] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1066.793616] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1066.793791] env[62619]: INFO nova.compute.manager [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Took 0.59 seconds to destroy the instance on the hypervisor. [ 1066.794044] env[62619]: DEBUG oslo.service.loopingcall [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1066.794263] env[62619]: DEBUG nova.compute.manager [-] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1066.794364] env[62619]: DEBUG nova.network.neutron [-] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1067.049274] env[62619]: DEBUG nova.compute.manager [req-a10f5da6-84ef-4423-8f90-94a6d7a7da47 req-671e1206-ff13-4e12-8bdb-aa4c4018e217 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Received event network-vif-deleted-02100821-c331-4ac6-897d-5dab9c259eca {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1067.049899] env[62619]: INFO nova.compute.manager [req-a10f5da6-84ef-4423-8f90-94a6d7a7da47 req-671e1206-ff13-4e12-8bdb-aa4c4018e217 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Neutron deleted interface 02100821-c331-4ac6-897d-5dab9c259eca; detaching it from the instance and deleting it from the info cache [ 1067.049899] env[62619]: DEBUG nova.network.neutron [req-a10f5da6-84ef-4423-8f90-94a6d7a7da47 req-671e1206-ff13-4e12-8bdb-aa4c4018e217 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.525704] env[62619]: DEBUG nova.network.neutron [-] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1067.553074] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c4c982c1-e1c9-4d51-bb3e-d3c5f83e72f8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.562044] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1beee30f-eff0-4e5a-9af4-f7145b0075f5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1067.588283] env[62619]: DEBUG nova.compute.manager [req-a10f5da6-84ef-4423-8f90-94a6d7a7da47 req-671e1206-ff13-4e12-8bdb-aa4c4018e217 service nova] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Detach interface failed, port_id=02100821-c331-4ac6-897d-5dab9c259eca, reason: Instance 7dd76e44-c5d3-4178-8942-6eb6e01edbcd could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1068.028220] env[62619]: INFO nova.compute.manager [-] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Took 1.23 seconds to deallocate network for instance. 
[ 1068.534493] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.534780] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1068.535014] env[62619]: DEBUG nova.objects.instance [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid 7dd76e44-c5d3-4178-8942-6eb6e01edbcd {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1068.964332] env[62619]: DEBUG oslo_concurrency.lockutils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1068.964589] env[62619]: DEBUG oslo_concurrency.lockutils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1069.123938] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3faac484-0000-411d-b259-64a48f2b256b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.131256] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6caeb0ba-9602-4317-b42f-26b85c8da9eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.159481] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21a18b02-234f-4014-94e1-7da423a25107 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.165980] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e06b5e0-477f-4633-8775-075914699c0d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1069.178446] env[62619]: DEBUG nova.compute.provider_tree [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 
4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1069.468150] env[62619]: DEBUG nova.compute.utils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1069.712019] env[62619]: DEBUG nova.scheduler.client.report [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 135 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1069.712019] env[62619]: DEBUG nova.compute.provider_tree [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 135 to 136 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1069.712019] env[62619]: DEBUG nova.compute.provider_tree [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 148, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1069.971339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.216147] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.681s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1070.235017] env[62619]: INFO nova.scheduler.client.report [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e 
tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance 7dd76e44-c5d3-4178-8942-6eb6e01edbcd [ 1070.742750] env[62619]: DEBUG oslo_concurrency.lockutils [None req-5f47ed99-2ac5-4b04-a31f-f3234f6eef7e tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "7dd76e44-c5d3-4178-8942-6eb6e01edbcd" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 4.543s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.043953] env[62619]: DEBUG oslo_concurrency.lockutils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.044272] env[62619]: DEBUG oslo_concurrency.lockutils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.044558] env[62619]: INFO nova.compute.manager [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Attaching volume 89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5 to /dev/sdb [ 1071.075111] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4073dd6f-9eca-49c6-8578-977a0f9613d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.081876] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39bae912-3961-4019-9083-831d812e6dfc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.096023] env[62619]: DEBUG nova.virt.block_device [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updating existing volume attachment record: b0722a6e-e4be-46bc-a107-811d8bfc77cd {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1071.929227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.929643] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 
0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.929942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1071.930227] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1071.930464] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1071.932633] env[62619]: INFO nova.compute.manager [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Terminating instance [ 1071.934619] env[62619]: DEBUG nova.compute.manager [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1071.934853] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1071.935744] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b313dde-caae-4926-816c-654dcf7dc643 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.943128] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1071.943376] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-cb976a43-df58-41b9-bba5-25243b9cdf96 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1071.949162] env[62619]: DEBUG oslo_vmware.api [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1071.949162] env[62619]: value = "task-1365406" [ 1071.949162] env[62619]: _type = "Task" [ 1071.949162] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1071.956734] env[62619]: DEBUG oslo_vmware.api [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365406, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1072.458953] env[62619]: DEBUG oslo_vmware.api [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365406, 'name': PowerOffVM_Task, 'duration_secs': 0.208411} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1072.459235] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1072.459410] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1072.459669] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2189e28e-ddae-4951-844b-cd3063e2f9b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.520837] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1072.521082] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1072.521272] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleting the datastore file [datastore2] 9f7d7830-b878-41b9-a236-f7cd5580cf1d {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1072.521539] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fabd0cf3-2a61-4a62-a889-907a30584af2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1072.527463] env[62619]: DEBUG oslo_vmware.api [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for the task: (returnval){ [ 1072.527463] env[62619]: value = "task-1365408" [ 1072.527463] env[62619]: _type = "Task" [ 1072.527463] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1072.534795] env[62619]: DEBUG oslo_vmware.api [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365408, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1073.037751] env[62619]: DEBUG oslo_vmware.api [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Task: {'id': task-1365408, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.133033} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1073.038094] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1073.038264] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1073.038395] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1073.038608] env[62619]: INFO nova.compute.manager [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1073.038860] env[62619]: DEBUG oslo.service.loopingcall [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1073.039068] env[62619]: DEBUG nova.compute.manager [-] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1073.039166] env[62619]: DEBUG nova.network.neutron [-] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1073.290927] env[62619]: DEBUG nova.compute.manager [req-aa1294bb-d9f0-4d82-96a1-977253838a7d req-615744bd-d20b-4201-854c-e72da78386f7 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Received event network-vif-deleted-b2d0f2bc-db7c-485c-8126-98219c38a4fd {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1073.291107] env[62619]: INFO nova.compute.manager [req-aa1294bb-d9f0-4d82-96a1-977253838a7d req-615744bd-d20b-4201-854c-e72da78386f7 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Neutron deleted interface b2d0f2bc-db7c-485c-8126-98219c38a4fd; detaching it from the instance and deleting it from the info cache [ 1073.291254] env[62619]: DEBUG nova.network.neutron [req-aa1294bb-d9f0-4d82-96a1-977253838a7d req-615744bd-d20b-4201-854c-e72da78386f7 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1073.771372] env[62619]: DEBUG nova.network.neutron [-] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 
1073.793392] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-f1abf8d2-648d-472a-8b14-9d43e1e20cce {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.803792] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b0c4da7-463e-4cfb-8caa-0c5ae8000d0b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1073.828193] env[62619]: DEBUG nova.compute.manager [req-aa1294bb-d9f0-4d82-96a1-977253838a7d req-615744bd-d20b-4201-854c-e72da78386f7 service nova] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Detach interface failed, port_id=b2d0f2bc-db7c-485c-8126-98219c38a4fd, reason: Instance 9f7d7830-b878-41b9-a236-f7cd5580cf1d could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1074.274077] env[62619]: INFO nova.compute.manager [-] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Took 1.23 seconds to deallocate network for instance. [ 1074.780269] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1074.780557] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1074.780785] env[62619]: DEBUG nova.objects.instance [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lazy-loading 'resources' on Instance uuid 9f7d7830-b878-41b9-a236-f7cd5580cf1d {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1075.358337] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b63f18f4-e35b-4cfd-bc64-8ca2f2f28e07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.366059] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b9e6e8c-3ca1-4261-9d4b-ebb91b83b3b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.395835] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec294f5c-1cad-4ff3-b423-6722adc50614 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.402576] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85bb0402-0115-49bb-9103-c8b65fe7549d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.415278] env[62619]: DEBUG nova.compute.provider_tree [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 
tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1075.639666] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1075.639916] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290560', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'name': 'volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '460c12f6-5eb8-427c-bda4-6773e1bc9034', 'attached_at': '', 'detached_at': '', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'serial': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1075.640798] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a6b4b7ac-6c38-47a9-b30d-653ae2cf1a65 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.656631] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04eb173a-1a59-47f1-ac8f-25b81280f7c5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.679261] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Reconfiguring VM instance instance-00000065 to attach disk [datastore1] volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5/volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1075.679493] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-5d8ee8ed-02c5-4b8e-99b1-19a5e515d471 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1075.696313] env[62619]: DEBUG oslo_vmware.api [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1075.696313] env[62619]: value = "task-1365410" [ 1075.696313] env[62619]: _type = "Task" [ 1075.696313] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1075.703523] env[62619]: DEBUG oslo_vmware.api [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365410, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1075.945454] env[62619]: DEBUG nova.scheduler.client.report [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updated inventory for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with generation 136 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:971}} [ 1075.945755] env[62619]: DEBUG nova.compute.provider_tree [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating resource provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 generation from 136 to 137 during operation: update_inventory {{(pid=62619) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 1075.945947] env[62619]: DEBUG nova.compute.provider_tree [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Updating inventory in ProviderTree for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1076.206362] env[62619]: DEBUG oslo_vmware.api [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365410, 'name': ReconfigVM_Task, 'duration_secs': 0.328283} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.206506] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Reconfigured VM instance instance-00000065 to attach disk [datastore1] volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5/volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1076.211387] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b607866e-8ecc-4483-996b-cadb7f509544 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1076.225533] env[62619]: DEBUG oslo_vmware.api [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1076.225533] env[62619]: value = "task-1365411" [ 1076.225533] env[62619]: _type = "Task" [ 1076.225533] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1076.232968] env[62619]: DEBUG oslo_vmware.api [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365411, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1076.451317] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.671s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1076.470939] env[62619]: INFO nova.scheduler.client.report [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Deleted allocations for instance 9f7d7830-b878-41b9-a236-f7cd5580cf1d [ 1076.734635] env[62619]: DEBUG oslo_vmware.api [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365411, 'name': ReconfigVM_Task, 'duration_secs': 0.146883} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1076.734968] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290560', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'name': 'volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '460c12f6-5eb8-427c-bda4-6773e1bc9034', 'attached_at': '', 'detached_at': '', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'serial': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1076.981328] env[62619]: DEBUG oslo_concurrency.lockutils [None req-10f5160c-754c-43ca-bafd-3872e9fba2dc tempest-ServersTestJSON-203516608 tempest-ServersTestJSON-203516608-project-member] Lock "9f7d7830-b878-41b9-a236-f7cd5580cf1d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.052s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1077.774856] env[62619]: DEBUG nova.objects.instance [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'flavor' on Instance uuid 460c12f6-5eb8-427c-bda4-6773e1bc9034 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1078.279856] env[62619]: DEBUG oslo_concurrency.lockutils [None req-703bd797-ff45-4dad-b3da-cadd5a205e68 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.235s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1079.469154] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1079.469440] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1079.971756] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1080.493543] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1080.493851] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1080.495446] env[62619]: INFO nova.compute.claims [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1081.575384] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21caa5f9-eee4-4072-9e20-7287a1cf1d5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.582983] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbb7100f-7f3f-43c8-b9bf-a251023a4a7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.612897] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2f8bb4c-593b-4883-897e-eb17ac993208 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.619381] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddf08610-8c60-4ca3-a5fa-a51add3340ac {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1081.631835] env[62619]: DEBUG nova.compute.provider_tree [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1082.134746] env[62619]: DEBUG nova.scheduler.client.report [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1082.640158] env[62619]: DEBUG oslo_concurrency.lockutils 
[None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.146s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1082.640733] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1083.146071] env[62619]: DEBUG nova.compute.utils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1083.147504] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1083.147696] env[62619]: DEBUG nova.network.neutron [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1083.195760] env[62619]: DEBUG nova.policy [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34a14400ec56409ca356b449a9e30cf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60ccdf8f256c427b9767a01dad0616fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1083.444681] env[62619]: DEBUG nova.network.neutron [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Successfully created port: a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1083.651205] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1084.661306] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Start spawning the instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1084.687466] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1084.687720] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1084.687878] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1084.688079] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1084.688233] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1084.688382] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1084.688587] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:569}} [ 1084.688759] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1084.688927] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1084.689105] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1084.689286] env[62619]: DEBUG nova.virt.hardware [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1084.690142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61e4f85e-1951-4ade-8150-bf3a5f7d57d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.697849] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b176b44-8f2d-42ef-954d-faf1b609c4a9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1084.801564] env[62619]: DEBUG nova.compute.manager [req-43dd6ddc-b09b-428d-9029-e76d7301c6b3 req-429d09d9-72d5-4113-88c6-9f4b74cd8fb9 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Received event network-vif-plugged-a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1084.801797] env[62619]: DEBUG oslo_concurrency.lockutils [req-43dd6ddc-b09b-428d-9029-e76d7301c6b3 req-429d09d9-72d5-4113-88c6-9f4b74cd8fb9 service nova] Acquiring lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1084.801991] env[62619]: DEBUG oslo_concurrency.lockutils [req-43dd6ddc-b09b-428d-9029-e76d7301c6b3 req-429d09d9-72d5-4113-88c6-9f4b74cd8fb9 service nova] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1084.802224] env[62619]: DEBUG oslo_concurrency.lockutils [req-43dd6ddc-b09b-428d-9029-e76d7301c6b3 req-429d09d9-72d5-4113-88c6-9f4b74cd8fb9 service nova] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1084.802407] env[62619]: 
DEBUG nova.compute.manager [req-43dd6ddc-b09b-428d-9029-e76d7301c6b3 req-429d09d9-72d5-4113-88c6-9f4b74cd8fb9 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] No waiting events found dispatching network-vif-plugged-a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1084.802603] env[62619]: WARNING nova.compute.manager [req-43dd6ddc-b09b-428d-9029-e76d7301c6b3 req-429d09d9-72d5-4113-88c6-9f4b74cd8fb9 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Received unexpected event network-vif-plugged-a761dd6a-1f50-4c60-8f2f-5b5acea9deab for instance with vm_state building and task_state spawning. [ 1085.354487] env[62619]: DEBUG nova.network.neutron [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Successfully updated port: a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1085.380516] env[62619]: DEBUG nova.compute.manager [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Received event network-changed-a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1085.380742] env[62619]: DEBUG nova.compute.manager [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Refreshing instance network info cache due to event network-changed-a761dd6a-1f50-4c60-8f2f-5b5acea9deab. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1085.381087] env[62619]: DEBUG oslo_concurrency.lockutils [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] Acquiring lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.381087] env[62619]: DEBUG oslo_concurrency.lockutils [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] Acquired lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1085.381238] env[62619]: DEBUG nova.network.neutron [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Refreshing network info cache for port a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1085.456325] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.456545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" acquired by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.456756] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "74aa214a-7eda-4613-a394-bc7477d3078e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1085.456942] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1085.457126] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1085.459226] env[62619]: INFO nova.compute.manager [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Terminating instance [ 1085.460874] env[62619]: DEBUG nova.compute.manager [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1085.461090] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1085.461894] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edd4da32-e341-4341-b6eb-48fb0e82d6d1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.469751] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1085.469997] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-8bef4f28-b42d-44b7-b5c1-c61c0255b76e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1085.476515] env[62619]: DEBUG oslo_vmware.api [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1085.476515] env[62619]: value = "task-1365412" [ 1085.476515] env[62619]: _type = "Task" [ 1085.476515] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1085.483964] env[62619]: DEBUG oslo_vmware.api [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365412, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1085.858065] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1085.913173] env[62619]: DEBUG nova.network.neutron [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1085.982052] env[62619]: DEBUG nova.network.neutron [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1085.986204] env[62619]: DEBUG oslo_vmware.api [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365412, 'name': PowerOffVM_Task, 'duration_secs': 0.162494} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1085.986653] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1085.986835] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1085.987102] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-306501d9-9e90-409c-a50d-5fc8c3cd6075 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.130101] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1086.130101] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1086.130101] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Deleting the datastore file [datastore2] 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1086.130317] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3c571efd-6184-45ba-9287-8238bb9bb602 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1086.136659] env[62619]: DEBUG oslo_vmware.api [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1086.136659] env[62619]: value = "task-1365414" [ 1086.136659] env[62619]: _type = "Task" [ 1086.136659] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1086.144192] env[62619]: DEBUG oslo_vmware.api [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365414, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1086.487458] env[62619]: DEBUG oslo_concurrency.lockutils [req-4fda89ca-7214-41b3-b16c-d35bd5403952 req-b4b5ec72-bb89-4fce-ba3f-2c8aea23e795 service nova] Releasing lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1086.487839] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1086.488015] env[62619]: DEBUG nova.network.neutron [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1086.646528] env[62619]: DEBUG oslo_vmware.api [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365414, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.155175} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1086.646755] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1086.646954] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1086.647154] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1086.647334] env[62619]: INFO nova.compute.manager [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Took 1.19 seconds to destroy the instance on the hypervisor. [ 1086.647577] env[62619]: DEBUG oslo.service.loopingcall [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1086.647768] env[62619]: DEBUG nova.compute.manager [-] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1086.647860] env[62619]: DEBUG nova.network.neutron [-] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1087.034044] env[62619]: DEBUG nova.network.neutron [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1087.197642] env[62619]: DEBUG nova.network.neutron [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Updating instance_info_cache with network_info: [{"id": "a761dd6a-1f50-4c60-8f2f-5b5acea9deab", "address": "fa:16:3e:5d:0c:95", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa761dd6a-1f", "ovs_interfaceid": "a761dd6a-1f50-4c60-8f2f-5b5acea9deab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.405855] env[62619]: DEBUG nova.compute.manager [req-b4134671-cc1e-4fed-be61-07eb79424a4a req-3e479792-db63-4f55-abea-4564a9fe65e9 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Received event network-vif-deleted-ec268e92-8fac-43c7-b39c-0518ca9a0403 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1087.406106] env[62619]: INFO nova.compute.manager [req-b4134671-cc1e-4fed-be61-07eb79424a4a req-3e479792-db63-4f55-abea-4564a9fe65e9 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Neutron deleted interface ec268e92-8fac-43c7-b39c-0518ca9a0403; detaching it from the instance and deleting it from the info cache [ 1087.406290] env[62619]: DEBUG nova.network.neutron [req-b4134671-cc1e-4fed-be61-07eb79424a4a req-3e479792-db63-4f55-abea-4564a9fe65e9 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.626123] 
env[62619]: DEBUG nova.network.neutron [-] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1087.700560] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1087.700916] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Instance network_info: |[{"id": "a761dd6a-1f50-4c60-8f2f-5b5acea9deab", "address": "fa:16:3e:5d:0c:95", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa761dd6a-1f", "ovs_interfaceid": "a761dd6a-1f50-4c60-8f2f-5b5acea9deab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1087.701393] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5d:0c:95', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a761dd6a-1f50-4c60-8f2f-5b5acea9deab', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1087.708806] env[62619]: DEBUG oslo.service.loopingcall [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1087.709338] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1087.709566] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-082dbc84-171d-433f-9ec1-22643a54e000 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.729136] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1087.729136] env[62619]: value = "task-1365415" [ 1087.729136] env[62619]: _type = "Task" [ 1087.729136] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1087.736548] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1087.908837] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3695b1fe-7a66-461a-b1dd-f16dbdf7d334 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.917880] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0465b47-2128-4a1a-bd3f-ae1d38cea5c9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1087.942247] env[62619]: DEBUG nova.compute.manager [req-b4134671-cc1e-4fed-be61-07eb79424a4a req-3e479792-db63-4f55-abea-4564a9fe65e9 service nova] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Detach interface failed, port_id=ec268e92-8fac-43c7-b39c-0518ca9a0403, reason: Instance 74aa214a-7eda-4613-a394-bc7477d3078e could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1088.129071] env[62619]: INFO nova.compute.manager [-] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Took 1.48 seconds to deallocate network for instance. [ 1088.239379] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1088.635547] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1088.635849] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1088.636158] env[62619]: DEBUG nova.objects.instance [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'resources' on Instance uuid 74aa214a-7eda-4613-a394-bc7477d3078e {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1088.739718] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.213644] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f722cc4-1c62-4599-a141-ff03bafc4d2e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.221276] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72b689e7-18d1-4912-be53-07ee69695606 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.253473] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abf4f86d-c8a3-4ab3-81ef-ca7739dae8a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.260593] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.263424] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d954bf6-3b76-4562-8cbb-d2297236d678 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1089.276374] env[62619]: DEBUG nova.compute.provider_tree [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1089.760559] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task} progress is 25%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1089.779584] env[62619]: DEBUG nova.scheduler.client.report [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1090.261141] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task} progress is 25%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.283966] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.648s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1090.307920] env[62619]: INFO nova.scheduler.client.report [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Deleted allocations for instance 74aa214a-7eda-4613-a394-bc7477d3078e [ 1090.762489] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365415, 'name': CreateVM_Task, 'duration_secs': 2.937028} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1090.762489] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1090.762680] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1090.763354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1090.763354] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1090.763354] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-10e81549-85e7-45f4-a075-e9591fa9ae3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1090.767783] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1090.767783] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52113ba8-6e62-fbfa-f0ab-a54f83435dd0" [ 1090.767783] env[62619]: _type = "Task" [ 1090.767783] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1090.776806] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52113ba8-6e62-fbfa-f0ab-a54f83435dd0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1090.815510] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4f67b7b6-936f-4dc2-a560-c3ce9d3b0b91 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "74aa214a-7eda-4613-a394-bc7477d3078e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.358s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1091.093295] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1091.093601] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1091.279175] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52113ba8-6e62-fbfa-f0ab-a54f83435dd0, 'name': SearchDatastore_Task, 'duration_secs': 0.009138} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.279445] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1091.279689] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1091.279928] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1091.280090] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1091.280274] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1091.280536] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a93706b6-c08f-49e2-ab6a-baa3cd31b206 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.288581] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1091.288763] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1091.289449] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f156eef-8dd2-4b9c-963d-4accde6f8216 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.294652] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1091.294652] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529e39d8-dd19-b4e6-cf79-e78f7edc8642" [ 1091.294652] env[62619]: _type = "Task" [ 1091.294652] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.301857] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529e39d8-dd19-b4e6-cf79-e78f7edc8642, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.597252] env[62619]: INFO nova.compute.manager [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Detaching volume 1192b48d-685e-4291-a41c-ab6536cdc58e [ 1091.635484] env[62619]: INFO nova.virt.block_device [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Attempting to driver detach volume 1192b48d-685e-4291-a41c-ab6536cdc58e from mountpoint /dev/sdb [ 1091.635772] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1091.635964] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290556', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'name': 'volume-1192b48d-685e-4291-a41c-ab6536cdc58e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2', 'attached_at': '', 'detached_at': '', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'serial': '1192b48d-685e-4291-a41c-ab6536cdc58e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1091.636880] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b4fac2f-16d0-48bb-94c5-78a185fbe3f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.658833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b13bee7-ede8-484b-9eba-7f1e722f8663 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.665832] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da929a26-6f02-46e5-9e22-03fe8c451bc7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.685496] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5599e792-e76f-432c-8c1a-0d32a0d2acfb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.700126] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] The volume has not been displaced from its original location: [datastore1] volume-1192b48d-685e-4291-a41c-ab6536cdc58e/volume-1192b48d-685e-4291-a41c-ab6536cdc58e.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1091.705206] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfiguring VM instance instance-0000005e to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1091.705526] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e709053-f720-462a-8481-67b4f2986e8f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.722909] env[62619]: DEBUG oslo_vmware.api [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1091.722909] env[62619]: value = "task-1365416" [ 1091.722909] env[62619]: _type = "Task" [ 1091.722909] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.730404] env[62619]: DEBUG oslo_vmware.api [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365416, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1091.804899] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]529e39d8-dd19-b4e6-cf79-e78f7edc8642, 'name': SearchDatastore_Task, 'duration_secs': 0.010285} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1091.805618] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c0377bc6-3081-4a40-928a-1294363ef722 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1091.812243] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1091.812243] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52625be9-ab8c-efd1-cdaf-30c0bc756ede" [ 1091.812243] env[62619]: _type = "Task" [ 1091.812243] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1091.819343] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52625be9-ab8c-efd1-cdaf-30c0bc756ede, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.207455] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1092.207698] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1092.232213] env[62619]: DEBUG oslo_vmware.api [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365416, 'name': ReconfigVM_Task, 'duration_secs': 0.209999} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.232471] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Reconfigured VM instance instance-0000005e to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1092.237294] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ca711f7-366b-44cb-8088-57c861d70d28 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.252366] env[62619]: DEBUG oslo_vmware.api [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1092.252366] env[62619]: value = "task-1365417" [ 1092.252366] env[62619]: _type = "Task" [ 1092.252366] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.260194] env[62619]: DEBUG oslo_vmware.api [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365417, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.322174] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52625be9-ab8c-efd1-cdaf-30c0bc756ede, 'name': SearchDatastore_Task, 'duration_secs': 0.008766} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.322517] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1092.322712] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f/bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1092.322973] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-010068be-4ac5-4626-9c91-dbf188baccea {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1092.329339] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1092.329339] env[62619]: value = "task-1365418" [ 1092.329339] env[62619]: _type = "Task" [ 1092.329339] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1092.336600] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365418, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1092.710764] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1092.765818] env[62619]: DEBUG oslo_vmware.api [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365417, 'name': ReconfigVM_Task, 'duration_secs': 0.130896} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1092.766224] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290556', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'name': 'volume-1192b48d-685e-4291-a41c-ab6536cdc58e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2', 'attached_at': '', 'detached_at': '', 'volume_id': '1192b48d-685e-4291-a41c-ab6536cdc58e', 'serial': '1192b48d-685e-4291-a41c-ab6536cdc58e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1092.839862] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365418, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.232199] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1093.232470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1093.234015] env[62619]: INFO nova.compute.claims [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1093.314330] env[62619]: DEBUG nova.objects.instance [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lazy-loading 'flavor' on Instance uuid 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1093.341062] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365418, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.526328} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.341371] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f/bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1093.341569] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1093.341825] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c357e271-04fb-4069-949f-09c20303127a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.348234] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1093.348234] env[62619]: value = "task-1365419" [ 1093.348234] env[62619]: _type = "Task" [ 1093.348234] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.356674] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365419, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1093.858812] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365419, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.092208} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1093.859082] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1093.859820] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-567d7318-c0dc-47f2-84f8-0f66212f95ae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.880924] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Reconfiguring VM instance instance-00000069 to attach disk [datastore1] bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f/bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1093.881169] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-657db5f3-73d2-4d00-990b-9f2f4eeced70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1093.900556] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1093.900556] env[62619]: value = "task-1365420" [ 1093.900556] env[62619]: _type = "Task" [ 1093.900556] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1093.907859] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365420, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.320813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ff2998ba-fa2a-4d7d-add4-fa3dadd650b6 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.227s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1094.322414] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7af28b1d-2815-4fe5-9417-02f4c5e7549d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.329733] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57961c00-10ed-4bdd-b85b-0bb18bb56648 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.359090] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2619e867-dc3b-4725-b852-89bc8f6af9b3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.365760] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bdc884b-c6fa-4fc9-bbfd-a41058afa739 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.378410] env[62619]: DEBUG nova.compute.provider_tree [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1094.409306] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365420, 'name': ReconfigVM_Task, 'duration_secs': 0.396531} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.409560] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Reconfigured VM instance instance-00000069 to attach disk [datastore1] bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f/bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1094.410151] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-f387df40-4a35-4615-8e37-a87dc9127f26 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.416179] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1094.416179] env[62619]: value = "task-1365421" [ 1094.416179] env[62619]: _type = "Task" [ 1094.416179] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.426050] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365421, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1094.881532] env[62619]: DEBUG nova.scheduler.client.report [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1094.925616] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365421, 'name': Rename_Task, 'duration_secs': 0.164655} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1094.925928] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1094.926209] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-5f9edde4-1dfe-429d-b72b-bdf9329969f4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1094.932451] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1094.932451] env[62619]: value = "task-1365422" [ 1094.932451] env[62619]: _type = "Task" [ 1094.932451] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1094.941214] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365422, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.386475] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.154s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.387042] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Start building networks asynchronously for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1095.399391] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.399623] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.399830] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1095.400093] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1095.400349] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1095.402297] env[62619]: INFO nova.compute.manager [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Terminating instance [ 1095.404059] env[62619]: DEBUG nova.compute.manager [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1095.404184] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1095.405012] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a704106e-b3a5-4d6d-bbf9-c23c09dde207 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.412964] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1095.413210] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-7d2aeced-bf82-468b-8dc0-3415b573bf1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.419633] env[62619]: DEBUG oslo_vmware.api [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1095.419633] env[62619]: value = "task-1365423" [ 1095.419633] env[62619]: _type = "Task" [ 1095.419633] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1095.427925] env[62619]: DEBUG oslo_vmware.api [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365423, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1095.441062] env[62619]: DEBUG oslo_vmware.api [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365422, 'name': PowerOnVM_Task, 'duration_secs': 0.422426} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.441341] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1095.441578] env[62619]: INFO nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Took 10.78 seconds to spawn the instance on the hypervisor. 
[ 1095.441783] env[62619]: DEBUG nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1095.442657] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0241dedc-88b3-48aa-8df3-a71d8d6f6df7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.530499] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.530734] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1095.892574] env[62619]: DEBUG nova.compute.utils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1095.893967] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Allocating IP information in the background. {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1095.894152] env[62619]: DEBUG nova.network.neutron [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1095.930054] env[62619]: DEBUG oslo_vmware.api [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365423, 'name': PowerOffVM_Task, 'duration_secs': 0.26993} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1095.930054] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1095.930054] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1095.930536] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d258e09f-40cd-41ed-bb0c-34a8a34b937e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1095.932793] env[62619]: DEBUG nova.policy [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'afafdd2ffb254ed88413f677c6480b27', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '382289eb2cfc4cb18bda69f887770db8', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1095.960885] env[62619]: INFO nova.compute.manager [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Took 15.48 seconds to build instance. [ 1096.038449] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1096.038593] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1096.185468] env[62619]: DEBUG nova.network.neutron [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Successfully created port: 15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1096.397096] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Start building block device mappings for instance. 
{{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1096.462388] env[62619]: DEBUG oslo_concurrency.lockutils [None req-e61fcfc2-eaaa-41d4-b837-f4bc71029d7f tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 16.993s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1096.580039] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.580329] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquired lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.580600] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1096.588786] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1096.589175] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1096.589481] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleting the datastore file [datastore1] 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1096.590038] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf4149c9-5364-4e19-b2f0-06f86488f0f9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1096.599597] env[62619]: DEBUG oslo_vmware.api [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1096.599597] env[62619]: value = "task-1365425" [ 1096.599597] env[62619]: _type = "Task" [ 1096.599597] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1096.606461] env[62619]: DEBUG oslo_vmware.api [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365425, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1096.855886] env[62619]: DEBUG nova.compute.manager [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Received event network-changed-a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1096.855988] env[62619]: DEBUG nova.compute.manager [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Refreshing instance network info cache due to event network-changed-a761dd6a-1f50-4c60-8f2f-5b5acea9deab. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1096.856204] env[62619]: DEBUG oslo_concurrency.lockutils [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] Acquiring lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1096.856352] env[62619]: DEBUG oslo_concurrency.lockutils [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] Acquired lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1096.856514] env[62619]: DEBUG nova.network.neutron [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Refreshing network info cache for port a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1097.107966] env[62619]: DEBUG oslo_vmware.api [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365425, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146866} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1097.107966] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1097.107966] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1097.108192] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1097.109536] env[62619]: INFO nova.compute.manager [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Took 1.70 seconds to destroy the instance on the hypervisor. [ 1097.109536] env[62619]: DEBUG oslo.service.loopingcall [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1097.109536] env[62619]: DEBUG nova.compute.manager [-] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1097.109536] env[62619]: DEBUG nova.network.neutron [-] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1097.409330] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None 
req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1097.440028] env[62619]: DEBUG nova.virt.hardware [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1097.440028] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1020c192-2ca0-4f6f-b68e-f17961f95a0c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.452577] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0097159e-b6c4-4e18-ad6d-9f96db5f66c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1097.683116] env[62619]: DEBUG nova.compute.manager [req-692ccb4d-98bc-4c52-99d9-9c2bcfaa5724 req-1de97660-e6ce-4751-9c13-006b50f281c9 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Received event network-vif-plugged-15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1097.683302] env[62619]: DEBUG oslo_concurrency.lockutils [req-692ccb4d-98bc-4c52-99d9-9c2bcfaa5724 req-1de97660-e6ce-4751-9c13-006b50f281c9 service nova] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1097.683524] env[62619]: DEBUG oslo_concurrency.lockutils [req-692ccb4d-98bc-4c52-99d9-9c2bcfaa5724 req-1de97660-e6ce-4751-9c13-006b50f281c9 service nova] Lock "072a9ba2-95d0-40c3-b323-21130df616f6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1097.683687] env[62619]: DEBUG oslo_concurrency.lockutils [req-692ccb4d-98bc-4c52-99d9-9c2bcfaa5724 req-1de97660-e6ce-4751-9c13-006b50f281c9 service nova] Lock "072a9ba2-95d0-40c3-b323-21130df616f6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1097.683854] env[62619]: DEBUG nova.compute.manager [req-692ccb4d-98bc-4c52-99d9-9c2bcfaa5724 req-1de97660-e6ce-4751-9c13-006b50f281c9 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] No waiting events found dispatching network-vif-plugged-15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1097.684344] env[62619]: WARNING nova.compute.manager [req-692ccb4d-98bc-4c52-99d9-9c2bcfaa5724 
req-1de97660-e6ce-4751-9c13-006b50f281c9 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Received unexpected event network-vif-plugged-15698219-89e4-4caa-a849-1b49d8beb863 for instance with vm_state building and task_state spawning. [ 1097.703585] env[62619]: DEBUG nova.network.neutron [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Updated VIF entry in instance network info cache for port a761dd6a-1f50-4c60-8f2f-5b5acea9deab. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1097.703934] env[62619]: DEBUG nova.network.neutron [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Updating instance_info_cache with network_info: [{"id": "a761dd6a-1f50-4c60-8f2f-5b5acea9deab", "address": "fa:16:3e:5d:0c:95", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.141", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa761dd6a-1f", "ovs_interfaceid": "a761dd6a-1f50-4c60-8f2f-5b5acea9deab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.778088] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [{"id": "53678dcd-6f40-4645-961d-a9c2608eeba8", "address": "fa:16:3e:bb:46:22", "network": {"id": "2e2bffa0-5ac0-4b02-b7dd-60e81a5f4de0", "bridge": "br-int", "label": "tempest-ServerRescueNegativeTestJSON-1902676628-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b6e92987102742d9b65b83850b6e5e7e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "d69a4b11-8d65-435f-94a5-28f74a39a718", "external-id": "cl2-zone-59", "segmentation_id": 59, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap53678dcd-6f", "ovs_interfaceid": "53678dcd-6f40-4645-961d-a9c2608eeba8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, 
"preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1097.784171] env[62619]: DEBUG nova.network.neutron [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Successfully updated port: 15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1098.061507] env[62619]: DEBUG nova.network.neutron [-] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1098.206547] env[62619]: DEBUG oslo_concurrency.lockutils [req-13f17ca2-9150-40a4-a3ac-0bf1719ec76a req-367258fc-dfc5-4082-af15-24cb22dd182c service nova] Releasing lock "refresh_cache-bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.281024] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Releasing lock "refresh_cache-02dec4f2-cbe7-4bb0-a57e-3970c5669354" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1098.281267] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1098.281492] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.281654] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.281799] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.281948] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.282109] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.282260] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.282384] env[62619]: DEBUG nova.compute.manager 
[None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1098.282522] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1098.287030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1098.287030] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1098.287030] env[62619]: DEBUG nova.network.neutron [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1098.567172] env[62619]: INFO nova.compute.manager [-] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Took 1.46 seconds to deallocate network for instance. [ 1098.786283] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.786638] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.786911] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1098.787174] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1098.788522] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f99fa88-9764-459a-afe0-9757f0d71f45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.802933] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-cf6e6564-692c-4b44-99bf-df7004ec32fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.824577] env[62619]: DEBUG nova.network.neutron [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Instance cache missing network info. {{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1098.827925] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c04f3bf2-3df6-4a26-ad00-a1fc8f459de4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.837682] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-92035981-737d-48ac-a4ee-792f92bcdcf7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1098.889327] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180516MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1098.889564] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1098.889900] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1098.894539] env[62619]: DEBUG nova.compute.manager [req-283f143d-2a6c-47bb-a0ce-2f93fee6afad req-3300088d-c218-4e48-9998-ccbabbc008a5 service nova] [instance: 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2] Received event network-vif-deleted-7c383235-8fc6-41ec-b559-12d1ff143a09 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1099.012614] env[62619]: DEBUG nova.network.neutron [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updating instance_info_cache with network_info: [{"id": "15698219-89e4-4caa-a849-1b49d8beb863", "address": "fa:16:3e:c2:70:e8", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": 
true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15698219-89", "ovs_interfaceid": "15698219-89e4-4caa-a849-1b49d8beb863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1099.074410] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1099.516102] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1099.516494] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Instance network_info: |[{"id": "15698219-89e4-4caa-a849-1b49d8beb863", "address": "fa:16:3e:c2:70:e8", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15698219-89", "ovs_interfaceid": "15698219-89e4-4caa-a849-1b49d8beb863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1996}} [ 1099.516963] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:70:e8', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '748a5204-8f14-402c-9a6e-f3e6104db082', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '15698219-89e4-4caa-a849-1b49d8beb863', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1099.524407] env[62619]: DEBUG 
oslo.service.loopingcall [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1099.524631] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1099.525081] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f4ddadcc-9bd7-4766-9e75-ca1cf80e3634 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1099.553264] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1099.553264] env[62619]: value = "task-1365426" [ 1099.553264] env[62619]: _type = "Task" [ 1099.553264] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1099.565014] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365426, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1099.712044] env[62619]: DEBUG nova.compute.manager [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Received event network-changed-15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1099.712309] env[62619]: DEBUG nova.compute.manager [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Refreshing instance network info cache due to event network-changed-15698219-89e4-4caa-a849-1b49d8beb863. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1099.712581] env[62619]: DEBUG oslo_concurrency.lockutils [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] Acquiring lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1099.712762] env[62619]: DEBUG oslo_concurrency.lockutils [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] Acquired lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1099.712965] env[62619]: DEBUG nova.network.neutron [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Refreshing network info cache for port 15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1100.032848] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 02dec4f2-cbe7-4bb0-a57e-3970c5669354 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.033014] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.033177] env[62619]: WARNING nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 is not being actively managed by this compute host but has allocations referencing this compute host: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. Skipping heal of allocation because we do not know what to do. [ 1100.033299] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 460c12f6-5eb8-427c-bda4-6773e1bc9034 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.033461] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.033529] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 072a9ba2-95d0-40c3-b323-21130df616f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1100.033703] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1100.033838] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1472MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1100.065424] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365426, 'name': CreateVM_Task, 'duration_secs': 0.317939} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.065603] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1100.066340] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.066516] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.066841] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1100.067160] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-548fcd71-457c-4f00-8ad2-875de10baeb7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.073898] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1100.073898] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5242d1b6-8cbd-c2bf-1e67-2f16ddaa9542" [ 1100.073898] env[62619]: _type = "Task" [ 1100.073898] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.081468] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5242d1b6-8cbd-c2bf-1e67-2f16ddaa9542, 'name': SearchDatastore_Task} progress is 0%. 
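Every 'Waiting for the task: (returnval){ value = "task-..." } to complete' block above, paired with the '_poll_task ... progress is N%' lines, is oslo.vmware's task loop: the session polls the vCenter task on a fixed interval until it ends in 'success' or 'error'. A hedged sketch of driving that loop through the same library; host, credentials and interval values are placeholders, not this deployment's settings:

    # Sketch only: building an oslo.vmware session and waiting on a task.
    from oslo_vmware import api as vmware_api

    def make_session():
        return vmware_api.VMwareAPISession(
            'vcenter.example.org',   # host -- placeholder
            'user', 'secret',        # username / password -- placeholders
            10,                      # api_retry_count
            0.5)                     # task_poll_interval, in seconds

    def wait(session, task_ref):
        # Blocks until the task finishes: returns the task info on 'success',
        # raises an oslo.vmware exception on 'error'.
        return session.wait_for_task(task_ref)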
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.122475] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21389337-c82d-490a-a1e8-ad6e42c3df97 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.129961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2204396e-363c-4a9a-a469-3769da0444bb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.160523] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4915d63d-6d09-46e7-8e04-a4a431b909b5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.167867] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee6de580-09bb-4ce7-9fc7-5af3e039f459 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.181864] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1100.413892] env[62619]: DEBUG nova.network.neutron [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updated VIF entry in instance network info cache for port 15698219-89e4-4caa-a849-1b49d8beb863. 
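The req-87ce1b59 records above show the handling of an external network-changed-<port_id> event from Neutron: the compute service takes the per-instance "refresh_cache-<uuid>" lock, re-reads the port, and rewrites the instance's cached network_info, which produces the 'Updated VIF entry' line. A minimal sketch of that shape, using stand-in names rather than Nova's actual signatures:

    # Sketch only: event-driven refresh of a per-instance network-info cache,
    # serialized by a named lock as in the "refresh_cache-<uuid>" lines above.
    # get_nw_info and cache are stand-ins for the Neutron API and DB layers.
    from oslo_concurrency import lockutils

    def handle_network_changed(instance_uuid, port_id, get_nw_info, cache):
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            cache[instance_uuid] = get_nw_info(instance_uuid, port_id)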
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1100.414320] env[62619]: DEBUG nova.network.neutron [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updating instance_info_cache with network_info: [{"id": "15698219-89e4-4caa-a849-1b49d8beb863", "address": "fa:16:3e:c2:70:e8", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15698219-89", "ovs_interfaceid": "15698219-89e4-4caa-a849-1b49d8beb863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1100.584433] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5242d1b6-8cbd-c2bf-1e67-2f16ddaa9542, 'name': SearchDatastore_Task, 'duration_secs': 0.013142} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1100.584736] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1100.585151] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1100.585415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1100.585572] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1100.585754] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1100.586062] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cd10d9a2-8223-4aeb-a0f4-1062b01dc7e9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.593933] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1100.594124] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Folder [datastore2] devstack-image-cache_base created. 
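The Acquiring/Acquired/Releasing lock lines around "[datastore2] devstack-image-cache_base/..." above are oslo.concurrency named locks: only one worker at a time may inspect or populate a given cached image, which is why the SearchDatastore_Task lookups happen inside the lock. A minimal sketch of that primitive (lock name and body are examples, not Nova's code):

    # Sketch only: serializing check-then-fetch work on a cached image with
    # oslo_concurrency.lockutils; the context manager emits exactly the
    # "Acquiring lock" / "acquired" / "Releasing lock" DEBUG lines seen above.
    from oslo_concurrency import lockutils

    def ensure_cached_image(image_id, is_cached, fetch):
        with lockutils.lock('[datastore2] devstack-image-cache_base/%s'
                            % image_id):
            # is_cached/fetch are stand-in callables for the datastore search
            # and image download steps.
            if not is_cached(image_id):
                fetch(image_id)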
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1100.594824] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ddda7801-fc6c-4ff6-977d-2572f8ff9d3f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1100.599683] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1100.599683] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5227b2ba-d85a-f78f-21e9-2989321fd870" [ 1100.599683] env[62619]: _type = "Task" [ 1100.599683] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1100.606949] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5227b2ba-d85a-f78f-21e9-2989321fd870, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1100.684437] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1100.917186] env[62619]: DEBUG oslo_concurrency.lockutils [req-87ce1b59-40d6-4ed8-b21a-2ef2cd2dcd96 req-f9e5dd39-4c2c-4f4a-9b43-dc10052234e1 service nova] Releasing lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.109463] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5227b2ba-d85a-f78f-21e9-2989321fd870, 'name': SearchDatastore_Task, 'duration_secs': 0.008611} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.110260] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ca3334e5-b867-4156-bac7-018ed59bbee7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.115061] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1101.115061] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5279fe1b-01ea-bf27-6399-a38fc3f52f71" [ 1101.115061] env[62619]: _type = "Task" [ 1101.115061] env[62619]: } to complete. 
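The inventory dict reported above is what the compute node maintains in Placement. Effective capacity per resource class is (total - reserved) * allocation_ratio, so this provider can hold (48 - 0) * 4.0 = 192 VCPUs of allocations, (196590 - 512) * 1.0 = 196078 MB of RAM, and (400 - 0) * 1.0 = 400 GB of disk, while max_unit caps any single allocation (16 VCPU, 65530 MB, 149 GB here). A one-function sketch of that rule:

    # Sketch of Placement's capacity rule for a single resource class.
    def capacity(total, reserved, allocation_ratio):
        return int((total - reserved) * allocation_ratio)

    assert capacity(48, 0, 4.0) == 192           # VCPU, per the inventory above
    assert capacity(196590, 512, 1.0) == 196078  # MEMORY_MB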
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.122310] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5279fe1b-01ea-bf27-6399-a38fc3f52f71, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.189198] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1101.189339] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.300s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.189589] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 2.116s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1101.189776] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1101.191673] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1101.191820] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Cleaning up deleted instances {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11228}} [ 1101.216774] env[62619]: INFO nova.scheduler.client.report [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleted allocations for instance 52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2 [ 1101.625120] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5279fe1b-01ea-bf27-6399-a38fc3f52f71, 'name': SearchDatastore_Task, 'duration_secs': 0.009265} completed successfully. 
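'Running periodic task ComputeManager._run_pending_deletes', and the stream of 'Instance has had 0 of 5 cleanup attempts' lines that follows, come from oslo.service's periodic-task machinery: methods declared as periodic tasks are collected from the manager class and run on a timer. A minimal sketch of declaring such a task; the 60-second spacing and the body are illustrative, not Nova's configuration:

    # Sketch only: declaring a periodic task with oslo_service.
    from oslo_service import periodic_task

    class ComputeLikeManager(periodic_task.PeriodicTasks):
        @periodic_task.periodic_task(spacing=60)
        def _run_pending_deletes(self, context):
            # Retry cleanup of soft-deleted instances, giving up after a
            # configured number of attempts (5 in the log above).
            pass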
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1101.625397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1101.625656] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 072a9ba2-95d0-40c3-b323-21130df616f6/072a9ba2-95d0-40c3-b323-21130df616f6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1101.625969] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-829c13d7-7db1-4712-8982-9533ceac5954 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1101.632827] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1101.632827] env[62619]: value = "task-1365427" [ 1101.632827] env[62619]: _type = "Task" [ 1101.632827] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1101.640090] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365427, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1101.703075] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] There are 36 instances to clean {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11237}} [ 1101.703327] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 7dd76e44-c5d3-4178-8942-6eb6e01edbcd] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1101.728703] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8fea9e5d-c904-4d29-ad65-ae858804c3cb tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "52ad38ce-31d2-4fa2-a9a3-61c6a82e8aa2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 6.329s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1102.142790] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365427, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.456538} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.145042] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 072a9ba2-95d0-40c3-b323-21130df616f6/072a9ba2-95d0-40c3-b323-21130df616f6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1102.145042] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1102.145042] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-a7b1ffa7-8e0c-4083-970c-dcd684776138 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.149575] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1102.149575] env[62619]: value = "task-1365429" [ 1102.149575] env[62619]: _type = "Task" [ 1102.149575] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.156560] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365429, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1102.206243] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 27280d82-22ce-4312-8ff2-216d4364a889] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.659448] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365429, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.061959} completed successfully. 
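'Extending root virtual disk to 1048576' above is expressed in KiB: 1048576 KiB = 1 GiB, matching the DISK_GB: 1 this flavor holds in Placement. After the cached VMDK is copied into the instance directory it is grown to the flavor's root size before being attached; a small sketch of that unit conversion (the helper name is an assumption):

    # Sketch: flavor root disk size in GiB -> the KiB figure handed to
    # ExtendVirtualDisk_Task, as seen in the log above.
    def root_gb_to_kb(root_gb):
        return root_gb * 1024 * 1024

    assert root_gb_to_kb(1) == 1048576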
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1102.659728] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1102.660484] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d949a94-5536-463b-9c95-b02bd2a769ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.681522] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfiguring VM instance instance-0000006a to attach disk [datastore2] 072a9ba2-95d0-40c3-b323-21130df616f6/072a9ba2-95d0-40c3-b323-21130df616f6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1102.681730] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-7e7a5650-d344-4bb5-b31f-ef2d9c4f8774 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1102.701270] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1102.701270] env[62619]: value = "task-1365430" [ 1102.701270] env[62619]: _type = "Task" [ 1102.701270] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1102.709392] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: c0cbe0ed-94e0-4d02-ae7a-2589938f4c41] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1102.711179] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365430, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.212018] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: bff6a2d3-3d0c-42df-8129-f78e321d1b7e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1103.213825] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365430, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.712544] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365430, 'name': ReconfigVM_Task} progress is 99%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.715083] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: fafa3bc8-5ba5-48e2-8dc3-354a2f9eb542] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.212734] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365430, 'name': ReconfigVM_Task, 'duration_secs': 1.257191} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.213096] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfigured VM instance instance-0000006a to attach disk [datastore2] 072a9ba2-95d0-40c3-b323-21130df616f6/072a9ba2-95d0-40c3-b323-21130df616f6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1104.213613] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-5a3c07e1-9136-4d12-9525-6e01f06b9406 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.217660] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 48f193f6-9928-4098-8830-dadda0eb11e6] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.220332] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1104.220332] env[62619]: value = "task-1365431" [ 1104.220332] env[62619]: _type = "Task" [ 1104.220332] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.227796] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365431, 'name': Rename_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.721761] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: f5560f62-634d-42e5-9354-68089db01e33] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1104.733809] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365431, 'name': Rename_Task, 'duration_secs': 0.144231} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.734100] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1104.734349] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-2bc4da19-6a31-469f-a43b-761585441c7b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.740654] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1104.740654] env[62619]: value = "task-1365432" [ 1104.740654] env[62619]: _type = "Task" [ 1104.740654] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1104.748989] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365432, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.230081] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 1c406554-d91d-422a-9a5a-9e910fc51103] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1105.252432] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365432, 'name': PowerOnVM_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1105.733889] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 69e916ee-9edc-4e1b-85a0-40142364e3bb] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1105.750822] env[62619]: DEBUG oslo_vmware.api [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365432, 'name': PowerOnVM_Task, 'duration_secs': 0.538507} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1105.751128] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1105.751365] env[62619]: INFO nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Took 8.34 seconds to spawn the instance on the hypervisor. 
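Taken together, the tasks above are the whole vmwareapi spawn path for instance 072a9ba2-95d0-40c3-b323-21130df616f6, from registration to power-on, completed in 8.34 seconds. A compressed outline of that order, reconstructed from the log; the step descriptions are informal, not Nova's internal call graph:

    # Sketch only: the vCenter task sequence that made up this spawn.
    SPAWN_SEQUENCE = (
        ('CreateVM_Task',          'register the shell VM on the ESX host'),
        ('SearchDatastore_Task',   'locate the cached image VMDK'),
        ('CopyVirtualDisk_Task',   'clone the cached VMDK into the instance dir'),
        ('ExtendVirtualDisk_Task', "grow the root disk to the flavor's size"),
        ('ReconfigVM_Task',        'attach the root disk to the VM'),
        ('Rename_Task',            'set the final VM name'),
        ('PowerOnVM_Task',         'boot the instance'),
    )

    for task_name, purpose in SPAWN_SEQUENCE:
        print(f'{task_name:24s} {purpose}')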
[ 1105.751550] env[62619]: DEBUG nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1105.752385] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f19e5496-0022-4991-a2e0-4d9f11139c68 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.237479] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: f3638527-351b-4ea8-bf7d-6e90bc68abe4] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.268209] env[62619]: INFO nova.compute.manager [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Took 13.05 seconds to build instance. [ 1106.524610] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.524871] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.525161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1106.525364] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1106.525545] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 
1106.527626] env[62619]: INFO nova.compute.manager [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Terminating instance [ 1106.529348] env[62619]: DEBUG nova.compute.manager [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1106.529548] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1106.530399] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70da8b78-d7ec-4051-a18a-e3426b61d4d4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.538353] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1106.538583] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-c57d6333-769f-4e76-958c-072b69981542 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.545162] env[62619]: DEBUG oslo_vmware.api [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1106.545162] env[62619]: value = "task-1365433" [ 1106.545162] env[62619]: _type = "Task" [ 1106.545162] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1106.552344] env[62619]: DEBUG oslo_vmware.api [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365433, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1106.741296] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 1cbb03c7-5b97-4c3b-aee0-5fa948027a4e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1106.770294] env[62619]: DEBUG oslo_concurrency.lockutils [None req-4da285ca-112b-42c4-82b1-2787ef23003e tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 14.562s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1107.055605] env[62619]: DEBUG oslo_vmware.api [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365433, 'name': PowerOffVM_Task, 'duration_secs': 0.194628} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.055883] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1107.056096] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1107.056363] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ca5586f4-5d99-4795-a2f5-8a2b18a4536f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.115676] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1107.115832] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1107.116080] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleting the datastore file [datastore2] 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1107.116303] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with 
opID=oslo.vmware-364df110-13e3-4106-ab61-70804c8f6f34 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.121917] env[62619]: DEBUG oslo_vmware.api [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1107.121917] env[62619]: value = "task-1365435" [ 1107.121917] env[62619]: _type = "Task" [ 1107.121917] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.130713] env[62619]: DEBUG oslo_vmware.api [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365435, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1107.244541] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 04e1e5ff-6385-4c3d-a226-355a171f7de0] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1107.632969] env[62619]: DEBUG oslo_vmware.api [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365435, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.170175} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1107.633265] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1107.633459] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1107.633640] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1107.633814] env[62619]: INFO nova.compute.manager [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1107.634083] env[62619]: DEBUG oslo.service.loopingcall [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.634284] env[62619]: DEBUG nova.compute.manager [-] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1107.634378] env[62619]: DEBUG nova.network.neutron [-] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1107.640649] env[62619]: DEBUG nova.compute.manager [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Received event network-changed-15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1107.640881] env[62619]: DEBUG nova.compute.manager [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Refreshing instance network info cache due to event network-changed-15698219-89e4-4caa-a849-1b49d8beb863. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1107.641199] env[62619]: DEBUG oslo_concurrency.lockutils [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] Acquiring lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1107.641424] env[62619]: DEBUG oslo_concurrency.lockutils [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] Acquired lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1107.641674] env[62619]: DEBUG nova.network.neutron [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Refreshing network info cache for port 15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1107.748346] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 4bc613db-af56-48b4-8c24-7f44428d8b4f] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1107.891589] env[62619]: DEBUG nova.compute.manager [req-f5a6ce71-28a8-4421-87fe-c4b32feb41ba req-18588744-9379-40f4-b2bb-253d027e20b4 service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Received event network-vif-deleted-b144833e-a8d9-4967-bb72-78e71720d55e {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1107.891801] env[62619]: INFO nova.compute.manager [req-f5a6ce71-28a8-4421-87fe-c4b32feb41ba req-18588744-9379-40f4-b2bb-253d027e20b4 service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Neutron deleted interface b144833e-a8d9-4967-bb72-78e71720d55e; detaching it from the instance and deleting it from the info cache [ 1107.892067] env[62619]: DEBUG nova.network.neutron [req-f5a6ce71-28a8-4421-87fe-c4b32feb41ba req-18588744-9379-40f4-b2bb-253d027e20b4 service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Updating instance_info_cache with network_info: [] {{(pid=62619) 
update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.251929] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 74aa214a-7eda-4613-a394-bc7477d3078e] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1108.351448] env[62619]: DEBUG nova.network.neutron [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updated VIF entry in instance network info cache for port 15698219-89e4-4caa-a849-1b49d8beb863. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1108.351806] env[62619]: DEBUG nova.network.neutron [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updating instance_info_cache with network_info: [{"id": "15698219-89e4-4caa-a849-1b49d8beb863", "address": "fa:16:3e:c2:70:e8", "network": {"id": "0cec34fd-9069-4f5d-9243-ebded046c967", "bridge": "br-int", "label": "tempest-AttachVolumeTestJSON-1484784001-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.218", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "382289eb2cfc4cb18bda69f887770db8", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "748a5204-8f14-402c-9a6e-f3e6104db082", "external-id": "nsx-vlan-transportzone-750", "segmentation_id": 750, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap15698219-89", "ovs_interfaceid": "15698219-89e4-4caa-a849-1b49d8beb863", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.364649] env[62619]: DEBUG nova.network.neutron [-] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1108.394917] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-76bc0c26-2832-4633-abcd-ee63d4796a43 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.405012] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1287a0e-1c7f-4309-9bc8-af48207915aa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.430468] env[62619]: DEBUG nova.compute.manager [req-f5a6ce71-28a8-4421-87fe-c4b32feb41ba req-18588744-9379-40f4-b2bb-253d027e20b4 service nova] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Detach interface failed, port_id=b144833e-a8d9-4967-bb72-78e71720d55e, reason: Instance 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1108.755615] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: c4d0e4fc-9ce2-4ebc-8ede-337f843ec855] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1108.854558] env[62619]: DEBUG oslo_concurrency.lockutils [req-90514197-68b9-48cf-8b2f-d60f41a5b2cb req-2ae2ad71-5639-4dad-bf40-d92452f74199 service nova] Releasing lock "refresh_cache-072a9ba2-95d0-40c3-b323-21130df616f6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1108.867376] env[62619]: INFO nova.compute.manager [-] [instance: 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f] Took 1.23 seconds to deallocate network for instance. [ 1109.259163] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: ef1e80cf-2ea2-4764-851a-8aa97563a278] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.374391] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1109.374669] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1109.374892] env[62619]: DEBUG nova.objects.instance [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lazy-loading 'resources' on Instance uuid 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1109.762243] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 42aeba4e-5c87-46d5-9c7c-c6f263c69171] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1109.948616] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-83035866-84f3-4a05-8cd7-129212b8e7e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.955921] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4da7a7b-8fec-4fd3-8596-5dd44a3be2c8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.985256] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0927bfb5-d038-475f-ba40-92c0b5a2d0ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1109.992470] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a8c3810-fd0b-458c-aad6-d356cfb2af0a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1110.004951] env[62619]: DEBUG nova.compute.provider_tree [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1110.265646] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 85e279da-e067-46f8-929b-87a013c4e7f4] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1110.507977] env[62619]: DEBUG nova.scheduler.client.report [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1110.769431] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 9f7d7830-b878-41b9-a236-f7cd5580cf1d] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.012926] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.638s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.031669] env[62619]: INFO nova.scheduler.client.report [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleted allocations for instance 495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f [ 1111.273125] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 8c07697f-0e20-4ec5-88ec-ec4420906313] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1111.543725] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ade1ca8b-17b1-428b-85d9-b741c7b15b93 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "495d8d2c-ef50-4dd6-a89c-ef77fcb2ee9f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.019s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1111.776197] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 
33998dc6-3be4-4b78-af12-0ad7bfab70c6] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.044515] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.044793] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.045043] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1112.045241] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1112.045415] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1112.047840] env[62619]: INFO nova.compute.manager [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Terminating instance [ 1112.049753] env[62619]: DEBUG nova.compute.manager [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Start destroying the instance on the hypervisor. 
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1112.049945] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1112.050786] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2efe18e-d386-4b26-9015-a21a6a8aa7ab {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.058918] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1112.059168] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-6309e1e9-09db-4dd6-97f9-437c08c01941 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.065055] env[62619]: DEBUG oslo_vmware.api [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1112.065055] env[62619]: value = "task-1365436" [ 1112.065055] env[62619]: _type = "Task" [ 1112.065055] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.072371] env[62619]: DEBUG oslo_vmware.api [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365436, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.279454] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 11542a9b-6556-4b4b-88fe-26c6be2969f6] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1112.575371] env[62619]: DEBUG oslo_vmware.api [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365436, 'name': PowerOffVM_Task, 'duration_secs': 0.304197} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1112.575490] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1112.575647] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1112.575898] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-735f403f-d516-448e-8504-96153deb8c19 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.629674] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1112.629910] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1112.630109] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleting the datastore file [datastore2] 02dec4f2-cbe7-4bb0-a57e-3970c5669354 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1112.630384] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-a55dad3c-98e2-430d-9ee7-bce6efe56e5b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1112.636861] env[62619]: DEBUG oslo_vmware.api [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for the task: (returnval){ [ 1112.636861] env[62619]: value = "task-1365438" [ 1112.636861] env[62619]: _type = "Task" [ 1112.636861] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1112.644446] env[62619]: DEBUG oslo_vmware.api [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365438, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1112.783340] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: da5a8beb-0246-43df-9813-436ddf8598a9] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.146803] env[62619]: DEBUG oslo_vmware.api [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Task: {'id': task-1365438, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144695} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1113.147067] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1113.147200] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1113.147387] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1113.147559] env[62619]: INFO nova.compute.manager [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1113.147802] env[62619]: DEBUG oslo.service.loopingcall [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1113.147989] env[62619]: DEBUG nova.compute.manager [-] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1113.148097] env[62619]: DEBUG nova.network.neutron [-] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1113.286536] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: fa4e9947-5b99-4447-9535-6dbcaba635f8] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.400048] env[62619]: DEBUG nova.compute.manager [req-26769871-8879-460c-958e-3414aaadc628 req-155ba38b-b4cb-4619-b66e-4b5b32a61242 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Received event network-vif-deleted-53678dcd-6f40-4645-961d-a9c2608eeba8 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1113.400269] env[62619]: INFO nova.compute.manager [req-26769871-8879-460c-958e-3414aaadc628 req-155ba38b-b4cb-4619-b66e-4b5b32a61242 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Neutron deleted interface 53678dcd-6f40-4645-961d-a9c2608eeba8; detaching it from the instance and deleting it from the info cache [ 1113.400445] env[62619]: DEBUG nova.network.neutron [req-26769871-8879-460c-958e-3414aaadc628 req-155ba38b-b4cb-4619-b66e-4b5b32a61242 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.790445] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 3ac4cfee-ae73-4ef4-bef1-7bc1ace1566a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1113.878544] env[62619]: DEBUG nova.network.neutron [-] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1113.902535] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4bd84469-5c8f-4e0f-9646-289fa74bff03 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.911695] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2b5b9d3-f831-4d53-acb9-995a17fa0d1c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1113.934535] env[62619]: DEBUG nova.compute.manager [req-26769871-8879-460c-958e-3414aaadc628 req-155ba38b-b4cb-4619-b66e-4b5b32a61242 service nova] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Detach interface failed, port_id=53678dcd-6f40-4645-961d-a9c2608eeba8, reason: Instance 02dec4f2-cbe7-4bb0-a57e-3970c5669354 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1114.294215] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 1c1b7717-30a9-40c9-913f-6d65a619b94a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.380864] env[62619]: INFO nova.compute.manager [-] [instance: 02dec4f2-cbe7-4bb0-a57e-3970c5669354] Took 1.23 seconds to deallocate network for instance. [ 1114.798050] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: f60e0bec-0811-4e91-bc45-b61874846497] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1114.887737] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1114.888029] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1114.888272] env[62619]: DEBUG nova.objects.instance [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lazy-loading 'resources' on Instance uuid 02dec4f2-cbe7-4bb0-a57e-3970c5669354 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1115.301938] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 055a1071-bd4b-4d1b-88c0-7551a07aee9a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1115.470237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd6cf195-10a1-4ba0-a823-66154c3e2258 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.478449] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1da212a3-6771-493d-9f44-643b77382424 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.509137] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e1a014-00a1-4b8b-9750-a5d6c165efe3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.516805] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0e3547b-de39-4d94-910d-ffea2a61789b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1115.530325] env[62619]: DEBUG nova.compute.provider_tree [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 
tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1115.805848] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 59b960b6-aa41-4409-a899-9829388c3ff2] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.033296] env[62619]: DEBUG nova.scheduler.client.report [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1116.309060] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: ca452ef6-d777-46dd-a313-ae7dd441adca] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1116.538169] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.650s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1116.556147] env[62619]: INFO nova.scheduler.client.report [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Deleted allocations for instance 02dec4f2-cbe7-4bb0-a57e-3970c5669354 [ 1116.812459] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 646b4ae6-09e1-4b3c-b17d-392e746df454] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.063356] env[62619]: DEBUG oslo_concurrency.lockutils [None req-77f2df81-bf61-4b55-b402-0f7b43596c06 tempest-ServerRescueNegativeTestJSON-159554906 tempest-ServerRescueNegativeTestJSON-159554906-project-member] Lock "02dec4f2-cbe7-4bb0-a57e-3970c5669354" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.018s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1117.316362] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: cfa74201-783f-4ef4-8860-e2f53e4dfb81] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1117.819599] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: c30e0db3-9b63-44b7-9b7f-810defc530d1] 
Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.323752] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 29f16f05-fe2f-4c16-ab8c-6fb210bbce8a] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1118.827170] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 61d68c36-5251-4fad-9d3b-125296ae0861] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.331365] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: dfe3d851-8b82-4d0f-9346-b22a5b4ec1fe] Instance has had 0 of 5 cleanup attempts {{(pid=62619) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11241}} [ 1119.835081] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.835081] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Cleaning up deleted instances with incomplete migration {{(pid=62619) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11266}} [ 1120.337737] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1133.729590] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.729990] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.730088] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1133.730281] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1133.730457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1133.733676] env[62619]: INFO nova.compute.manager [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Terminating instance [ 1133.735743] env[62619]: DEBUG nova.compute.manager [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1133.735943] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1133.736845] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-caf67bf2-b7b7-43e2-8c23-c9580e7d9588 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.744849] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1133.745105] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-1f36291d-ca57-47c4-a11b-39167bf5011e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1133.751779] env[62619]: DEBUG oslo_vmware.api [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1133.751779] env[62619]: value = "task-1365439" [ 1133.751779] env[62619]: _type = "Task" [ 1133.751779] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1133.759148] env[62619]: DEBUG oslo_vmware.api [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365439, 'name': PowerOffVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.261963] env[62619]: DEBUG oslo_vmware.api [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365439, 'name': PowerOffVM_Task, 'duration_secs': 0.188405} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.262251] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1134.262374] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1134.262671] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-14b951f4-1ea8-44ae-92ce-c12328b67b55 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.318715] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1134.318956] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1134.319168] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleting the datastore file [datastore1] bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1134.319454] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-12292155-358d-40e9-836f-f78caa3dd0d5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1134.326017] env[62619]: DEBUG oslo_vmware.api [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1134.326017] env[62619]: value = "task-1365441" [ 1134.326017] env[62619]: _type = "Task" [ 1134.326017] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1134.333697] env[62619]: DEBUG oslo_vmware.api [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365441, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1134.835323] env[62619]: DEBUG oslo_vmware.api [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365441, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.129374} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1134.835752] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1134.835877] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1134.836031] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1134.836216] env[62619]: INFO nova.compute.manager [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1134.836488] env[62619]: DEBUG oslo.service.loopingcall [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1134.836762] env[62619]: DEBUG nova.compute.manager [-] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1134.836863] env[62619]: DEBUG nova.network.neutron [-] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1135.432054] env[62619]: DEBUG nova.compute.manager [req-11ae1f00-3bf5-42f4-90ed-d4c776f7442f req-8115c201-1779-4703-9155-eb70252a8f22 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Received event network-vif-deleted-a761dd6a-1f50-4c60-8f2f-5b5acea9deab {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1135.432281] env[62619]: INFO nova.compute.manager [req-11ae1f00-3bf5-42f4-90ed-d4c776f7442f req-8115c201-1779-4703-9155-eb70252a8f22 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Neutron deleted interface a761dd6a-1f50-4c60-8f2f-5b5acea9deab; detaching it from the instance and deleting it from the info cache [ 1135.432459] env[62619]: DEBUG nova.network.neutron [req-11ae1f00-3bf5-42f4-90ed-d4c776f7442f req-8115c201-1779-4703-9155-eb70252a8f22 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.911749] env[62619]: DEBUG nova.network.neutron [-] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1135.937031] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-4b1be287-789d-44ac-a1a4-8c54486e51a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.946027] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e7730fa9-5b15-4583-ae49-3195fc7fc7a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1135.968857] env[62619]: DEBUG nova.compute.manager [req-11ae1f00-3bf5-42f4-90ed-d4c776f7442f req-8115c201-1779-4703-9155-eb70252a8f22 service nova] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Detach interface failed, port_id=a761dd6a-1f50-4c60-8f2f-5b5acea9deab, reason: Instance bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1136.414457] env[62619]: INFO nova.compute.manager [-] [instance: bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f] Took 1.58 seconds to deallocate network for instance. 
[ 1136.921018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1136.921389] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1136.921543] env[62619]: DEBUG nova.objects.instance [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'resources' on Instance uuid bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1137.474705] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3084c54d-6db3-4890-ad3a-285488abc00d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.482287] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67f54e0d-2329-4c05-a471-a6d22bbca658 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.511872] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a5fcb6-e20a-46fb-bb04-64dc3ce312a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.518684] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10d1a894-6560-46e3-8446-a1095628c4b1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1137.531261] env[62619]: DEBUG nova.compute.provider_tree [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1138.034317] env[62619]: DEBUG nova.scheduler.client.report [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1138.539940] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 
tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.618s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1138.562689] env[62619]: INFO nova.scheduler.client.report [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted allocations for instance bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f [ 1139.070570] env[62619]: DEBUG oslo_concurrency.lockutils [None req-3f198896-0ada-4f7a-a78f-6e7de7f1c8d0 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "bbf2e58d-35b4-4aa1-bfe5-e4f956ed507f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.341s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1139.651151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1139.651474] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1140.154797] env[62619]: INFO nova.compute.manager [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Detaching volume 89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5 [ 1140.214101] env[62619]: INFO nova.virt.block_device [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Attempting to driver detach volume 89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5 from mountpoint /dev/sdb [ 1140.214520] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1140.214845] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290560', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'name': 'volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '460c12f6-5eb8-427c-bda4-6773e1bc9034', 'attached_at': '', 'detached_at': '', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'serial': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1140.215791] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-affec6df-11e0-420b-a19e-99d3b9ff868b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.236640] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c5ec188-7693-41a1-8241-c6610d887f44 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.242760] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-175fec1a-7cd9-4d27-8c47-749b1fda5479 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.263124] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-beb4baf7-578d-43c4-9d21-02c3edf7ca8a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.277054] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] The volume has not been displaced from its original location: [datastore1] volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5/volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1140.282289] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Reconfiguring VM instance instance-00000065 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1140.282576] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4de8890b-cc3f-49b7-ac78-bd5ce416944c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.300015] env[62619]: DEBUG oslo_vmware.api [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1140.300015] env[62619]: value = "task-1365442" [ 1140.300015] env[62619]: _type = "Task" [ 1140.300015] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.307216] env[62619]: DEBUG oslo_vmware.api [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365442, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1140.810026] env[62619]: DEBUG oslo_vmware.api [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365442, 'name': ReconfigVM_Task, 'duration_secs': 0.216276} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1140.810481] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Reconfigured VM instance instance-00000065 to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1140.815111] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-88d5b9fd-68df-4cc0-b348-83d845e0375e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1140.829815] env[62619]: DEBUG oslo_vmware.api [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1140.829815] env[62619]: value = "task-1365443" [ 1140.829815] env[62619]: _type = "Task" [ 1140.829815] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1140.837509] env[62619]: DEBUG oslo_vmware.api [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365443, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1141.339888] env[62619]: DEBUG oslo_vmware.api [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365443, 'name': ReconfigVM_Task, 'duration_secs': 0.140184} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1141.340310] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290560', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'name': 'volume-89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '460c12f6-5eb8-427c-bda4-6773e1bc9034', 'attached_at': '', 'detached_at': '', 'volume_id': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5', 'serial': '89c5ca7f-e5c3-44a8-9fba-dd898e7d6cb5'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1141.878418] env[62619]: DEBUG nova.objects.instance [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'flavor' on Instance uuid 460c12f6-5eb8-427c-bda4-6773e1bc9034 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1142.885909] env[62619]: DEBUG oslo_concurrency.lockutils [None req-38c133cf-8fe2-471b-b863-656f12c6cfdb tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.234s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.771286] env[62619]: DEBUG oslo_concurrency.lockutils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.771531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.881145] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.881413] env[62619]: DEBUG 
oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.881625] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1143.881813] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1143.881983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1143.884051] env[62619]: INFO nova.compute.manager [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Terminating instance [ 1143.885779] env[62619]: DEBUG nova.compute.manager [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Start destroying the instance on the hypervisor. 
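The terminate path first takes the per-instance lock (and briefly the "-events" lock) so the detach that just finished and the teardown that follows cannot interleave on the same instance. A minimal sketch of that serialization with oslo.concurrency, assuming an in-process lock named after the instance UUID; this is an illustration, not the Nova code itself.

from oslo_concurrency import lockutils

INSTANCE_UUID = '460c12f6-5eb8-427c-bda4-6773e1bc9034'

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # Everything that mutates this instance runs with the lock held,
    # mirroring the "acquired ... released ... held N.NNNs" entries above.
    return 'terminated'

print(do_terminate_instance())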
{{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1143.885974] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1143.886883] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8895eaa0-041f-4c43-8fc4-9c92fa8c8542 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.894444] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1143.894678] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-399e0b2b-8f1d-4a22-8d39-0d94f2d4f5cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1143.900239] env[62619]: DEBUG oslo_vmware.api [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1143.900239] env[62619]: value = "task-1365444" [ 1143.900239] env[62619]: _type = "Task" [ 1143.900239] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1143.908600] env[62619]: DEBUG oslo_vmware.api [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365444, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.274623] env[62619]: DEBUG nova.compute.utils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1144.409599] env[62619]: DEBUG oslo_vmware.api [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365444, 'name': PowerOffVM_Task, 'duration_secs': 0.164136} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.409854] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1144.410032] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1144.410286] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-36f2e42a-652e-4b5d-b221-6083a81ccb45 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.465895] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1144.466144] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1144.466332] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleting the datastore file [datastore1] 460c12f6-5eb8-427c-bda4-6773e1bc9034 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1144.466601] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-60ed04bd-8fbb-410a-801c-9760ab2334f2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1144.473015] env[62619]: DEBUG oslo_vmware.api [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1144.473015] env[62619]: value = "task-1365446" [ 1144.473015] env[62619]: _type = "Task" [ 1144.473015] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1144.479952] env[62619]: DEBUG oslo_vmware.api [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365446, 'name': DeleteDatastoreFile_Task} progress is 0%. 
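The hypervisor-side teardown proceeds in a fixed order: power off the VM, unregister it from vCenter, then delete its directory from the datastore. A self-contained sketch of that ordering; the step callables are stand-ins, not the vmops implementation.

def destroy_instance(power_off, unregister, delete_datastore_dir):
    # Order matters: files can only be removed safely once the VM is
    # powered off and no longer registered with vCenter.
    power_off()
    unregister()
    delete_datastore_dir()

steps = []
destroy_instance(lambda: steps.append('PowerOffVM_Task'),
                 lambda: steps.append('UnregisterVM'),
                 lambda: steps.append('DeleteDatastoreFile_Task'))
print(steps)  # ['PowerOffVM_Task', 'UnregisterVM', 'DeleteDatastoreFile_Task']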
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1144.777611] env[62619]: DEBUG oslo_concurrency.lockutils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.006s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1144.983567] env[62619]: DEBUG oslo_vmware.api [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365446, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.142111} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1144.983914] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1144.983951] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1144.984121] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1144.984299] env[62619]: INFO nova.compute.manager [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1144.984543] env[62619]: DEBUG oslo.service.loopingcall [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
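After the instance is destroyed on the hypervisor, the manager waits (via an oslo.service looping call, per the entry above) for _deallocate_network_with_retries to return. The sketch below only shows the generic retry-until-done shape of such a call; the attempt count and sleep are made-up values, not Nova's configuration.

import time

def call_with_retries(fn, max_attempts=3, sleep=0.1):
    # Re-invoke fn until it succeeds or the attempts are exhausted.
    for attempt in range(1, max_attempts + 1):
        try:
            return fn()
        except Exception:
            if attempt == max_attempts:
                raise
            time.sleep(sleep)

calls = {'n': 0}
def deallocate():
    calls['n'] += 1
    if calls['n'] < 2:            # first attempt fails, second succeeds
        raise RuntimeError('neutron briefly unavailable')
    return 'deallocated'

print(call_with_retries(deallocate, sleep=0))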
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1144.984738] env[62619]: DEBUG nova.compute.manager [-] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1144.984833] env[62619]: DEBUG nova.network.neutron [-] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1145.440954] env[62619]: DEBUG nova.compute.manager [req-522bbc78-8ea5-42fa-88a6-0b5ef9ca3687 req-6cb20dc8-3c65-4d4e-a1fe-dc83df957a88 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Received event network-vif-deleted-91d6b3d0-bc47-408b-8f95-471b793c1330 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1145.441197] env[62619]: INFO nova.compute.manager [req-522bbc78-8ea5-42fa-88a6-0b5ef9ca3687 req-6cb20dc8-3c65-4d4e-a1fe-dc83df957a88 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Neutron deleted interface 91d6b3d0-bc47-408b-8f95-471b793c1330; detaching it from the instance and deleting it from the info cache [ 1145.441362] env[62619]: DEBUG nova.network.neutron [req-522bbc78-8ea5-42fa-88a6-0b5ef9ca3687 req-6cb20dc8-3c65-4d4e-a1fe-dc83df957a88 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.840786] env[62619]: DEBUG oslo_concurrency.lockutils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1145.841079] env[62619]: DEBUG oslo_concurrency.lockutils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1145.841343] env[62619]: INFO nova.compute.manager [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Attaching volume e50f943a-d0e0-4992-bcd6-2f3a753eb8f1 to /dev/sdb [ 1145.874361] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd5961c2-b7b5-436a-b06e-5e928706768b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.881319] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6c2557bb-f445-4e75-a3a6-e88523c3e635 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.893561] env[62619]: DEBUG nova.virt.block_device [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 
072a9ba2-95d0-40c3-b323-21130df616f6] Updating existing volume attachment record: fa3a0ae1-5a6b-4d99-ad95-59e0b6cc3f65 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1145.921464] env[62619]: DEBUG nova.network.neutron [-] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1145.943873] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c6f5a2a0-63bd-4142-a2ca-f155f6c9c28d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.954039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d0e0ced-7a3e-45a0-90dc-e7da1a6c4c57 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1145.976898] env[62619]: DEBUG nova.compute.manager [req-522bbc78-8ea5-42fa-88a6-0b5ef9ca3687 req-6cb20dc8-3c65-4d4e-a1fe-dc83df957a88 service nova] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Detach interface failed, port_id=91d6b3d0-bc47-408b-8f95-471b793c1330, reason: Instance 460c12f6-5eb8-427c-bda4-6773e1bc9034 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1146.423416] env[62619]: INFO nova.compute.manager [-] [instance: 460c12f6-5eb8-427c-bda4-6773e1bc9034] Took 1.44 seconds to deallocate network for instance. [ 1146.604925] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_power_states {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1146.932060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1146.932060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1146.932060] env[62619]: DEBUG nova.objects.instance [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'resources' on Instance uuid 460c12f6-5eb8-427c-bda4-6773e1bc9034 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1147.108588] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Getting list of instances from cluster (obj){ [ 1147.108588] env[62619]: value = "domain-c8" [ 1147.108588] env[62619]: _type = "ClusterComputeResource" [ 1147.108588] env[62619]: } {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2116}} [ 1147.109639] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae147e5-d3e8-4531-9e3a-6aea656ca333 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.120704] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Got total of 1 instances {{(pid=62619) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2125}} [ 1147.120854] env[62619]: WARNING nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] While synchronizing instance power states, found 2 instances in the database and 1 instances on the hypervisor. [ 1147.120991] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Triggering sync for uuid 460c12f6-5eb8-427c-bda4-6773e1bc9034 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1147.121209] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Triggering sync for uuid 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10338}} [ 1147.121518] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.121757] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1147.473116] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273e2364-2941-4de5-9d76-573c2a0327a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.481260] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff6076e1-d735-4d54-aa04-9699b3df0b60 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.512010] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c2df87f-9a8e-4446-a8f2-954479dc87a4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.518802] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-049d3ea9-8a95-4b3b-a893-bf6232ba3797 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1147.532454] env[62619]: DEBUG nova.compute.provider_tree [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1148.037454] env[62619]: DEBUG nova.scheduler.client.report [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 
tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1148.542282] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.611s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1148.560698] env[62619]: INFO nova.scheduler.client.report [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted allocations for instance 460c12f6-5eb8-427c-bda4-6773e1bc9034 [ 1149.069118] env[62619]: DEBUG oslo_concurrency.lockutils [None req-221bab6d-f6e8-4102-ad69-1ae134f7d745 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.188s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1149.069983] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 1.948s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1149.070439] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c43f257-4f1a-48d0-87bf-f7fae0443adc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.079870] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5373e2-3e97-4dc4-b1b8-84788c238cba {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1149.605506] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "460c12f6-5eb8-427c-bda4-6773e1bc9034" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.535s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1150.438414] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Volume attach. 
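"Inventory has not changed" in the report client entries above means the locally built inventory matched what the provider tree already holds, so nothing is pushed to placement. A minimal sketch of that compare-before-update check; push_to_placement and the trimmed inventory dict are assumptions for the example.

def maybe_update_inventory(provider_id, cached, new, push_to_placement):
    # Only write to placement when the inventory actually differs.
    if cached == new:
        print('Inventory has not changed for provider %s' % provider_id)
        return cached
    push_to_placement(provider_id, new)
    return new

inventory = {'VCPU': {'total': 48, 'allocation_ratio': 4.0},
             'MEMORY_MB': {'total': 196590, 'reserved': 512},
             'DISK_GB': {'total': 400}}
maybe_update_inventory('c1b543f3-8b72-4e01-a5a8-30dc9ed76c83',
                       inventory, dict(inventory),
                       push_to_placement=lambda p, i: None)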
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1150.438657] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290563', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'name': 'volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'serial': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1150.439553] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-501db31f-dd71-4288-b9b9-72b3689a3633 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.455724] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-edaea98b-d0df-4ad1-a3f4-29d594f7c6fe {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.480099] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1/volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1150.480339] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-429c0778-91ed-492b-92eb-43fb0e9398f0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.498057] env[62619]: DEBUG oslo_vmware.api [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1150.498057] env[62619]: value = "task-1365449" [ 1150.498057] env[62619]: _type = "Task" [ 1150.498057] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.505684] env[62619]: DEBUG oslo_vmware.api [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365449, 'name': ReconfigVM_Task} progress is 5%. 
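The _attach_volume_vmdk entries carry a connection_info dict whose data section names the Cinder volume and its backing file, which the reconfigure step turns into the "[datastore1] volume-<id>/volume-<id>.vmdk" path seen above. The helper below only illustrates that mapping; the function name and the default datastore are assumptions for the example.

connection_info = {
    'driver_volume_type': 'vmdk',
    'data': {'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1',
             'name': 'volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1',
             'access_mode': 'rw'},
}

def vmdk_path(conn, datastore='datastore1'):
    # "[datastore1] volume-<id>/volume-<id>.vmdk" as logged by the reconfigure.
    name = conn['data']['name']
    return '[%s] %s/%s.vmdk' % (datastore, name, name)

print(vmdk_path(connection_info))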
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1150.671209] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "95031e1a-e567-496f-ac3b-7d0121e487f4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1150.671531] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.007554] env[62619]: DEBUG oslo_vmware.api [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365449, 'name': ReconfigVM_Task, 'duration_secs': 0.344388} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.007847] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1/volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1151.012444] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-4238f5cb-6828-4c1b-b7d6-ff5ffee94085 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.027350] env[62619]: DEBUG oslo_vmware.api [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1151.027350] env[62619]: value = "task-1365450" [ 1151.027350] env[62619]: _type = "Task" [ 1151.027350] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.035324] env[62619]: DEBUG oslo_vmware.api [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365450, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.174257] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Starting instance... 
{{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1151.531694] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1151.538466] env[62619]: DEBUG oslo_vmware.api [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365450, 'name': ReconfigVM_Task, 'duration_secs': 0.144786} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.538761] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290563', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'name': 'volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'serial': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1151.693939] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1151.694251] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1151.695774] env[62619]: INFO nova.compute.claims [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1152.035178] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1152.573572] env[62619]: DEBUG nova.objects.instance [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1152.742577] env[62619]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015d3876-c030-465b-8ada-359ad62e6cb6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.749930] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-acc527be-0fef-4c63-8e00-167b1a5e6a00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.778762] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5e0205e-c9da-4da0-a5bf-d5fe7839275a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.785795] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41053f7c-5f53-43fb-bc0b-71fe5507377f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.802454] env[62619]: DEBUG nova.compute.provider_tree [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.079041] env[62619]: DEBUG oslo_concurrency.lockutils [None req-06ee11d3-5c0d-4e2b-bc16-1e58912dff23 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.237s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.079761] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 5.957s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.080231] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d657796-af00-4112-90f5-f355256a38e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.306569] env[62619]: DEBUG nova.scheduler.client.report [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1153.589694] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by 
"nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.510s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.811060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.117s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.811602] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1153.814250] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 1.779s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.814431] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1153.814582] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1153.815439] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b33197e9-713b-4aec-be88-64374ff30da0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.823733] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-645ce908-cc09-40a8-8a17-ee8a842bd929 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.838977] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a132578-ec87-4b57-9b67-36aac960e6e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.845661] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d39c785d-2579-4cce-b4fd-a439f03dba06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.874103] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180679MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1153.874267] env[62619]: 
DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.874463] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1153.894123] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1153.894386] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1154.320778] env[62619]: DEBUG nova.compute.utils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1154.322211] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Allocating IP information in the background. 
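"Using /dev/sd instead of None" comes from the device-name reservation step: with no explicit device requested, the compute manager picks the next free name on the /dev/sd prefix, which is why the first data volume lands on /dev/sdb and the next one, later in this log, on /dev/sdc. A simplified, self-contained sketch of that selection; the real get_next_device_name handles many more cases.

import string

def next_device_name(in_use, prefix='/dev/sd'):
    # The root disk is typically sda, so start scanning at sdb.
    for letter in string.ascii_lowercase[1:]:
        candidate = prefix + letter
        if candidate not in in_use:
            return candidate
    raise ValueError('no free device names left')

print(next_device_name({'/dev/sda'}))              # /dev/sdb
print(next_device_name({'/dev/sda', '/dev/sdb'}))  # /dev/sdc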
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1154.322474] env[62619]: DEBUG nova.network.neutron [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1154.372549] env[62619]: DEBUG nova.policy [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34a14400ec56409ca356b449a9e30cf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60ccdf8f256c427b9767a01dad0616fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1154.397316] env[62619]: DEBUG nova.compute.utils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1154.647345] env[62619]: DEBUG nova.network.neutron [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Successfully created port: 0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1154.826766] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1154.899137] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 072a9ba2-95d0-40c3-b323-21130df616f6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1154.899301] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 95031e1a-e567-496f-ac3b-7d0121e487f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
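The two per-instance allocations above ({'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1} each) are what the resource tracker sums, together with the 512 MB reserved in the MEMORY_MB inventory, into the host view reported just below (used_ram=896MB, used_disk=2GB, used_vcpus=2). The arithmetic, spelled out as a small runnable check:

allocations = [{'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1},
               {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}]
reserved_ram_mb = 512   # from the MEMORY_MB inventory reported earlier

used_ram_mb = reserved_ram_mb + sum(a['MEMORY_MB'] for a in allocations)
used_disk_gb = sum(a['DISK_GB'] for a in allocations)
used_vcpus = sum(a['VCPU'] for a in allocations)

print(used_ram_mb, used_disk_gb, used_vcpus)   # 896 2 2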
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1154.899483] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 2 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1154.899623] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=896MB phys_disk=200GB used_disk=2GB total_vcpus=48 used_vcpus=2 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1154.902495] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.008s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1154.939981] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd882106-7e4e-4df7-818a-6e2817294ff1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.947604] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccbe2610-07fa-4eb9-b723-75c9963d3d06 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.977624] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24f20012-a928-4fbc-8071-3cb73838fcb9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.984331] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a433f2f0-fe43-4811-8a3a-e6cb3231f0c0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.997013] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1155.499862] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1155.836225] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Start spawning the 
instance on the hypervisor. {{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1155.860196] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1155.860461] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1155.860623] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1155.860808] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1155.860958] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1155.861122] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1155.861330] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1155.861490] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
1155.861675] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1155.861864] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1155.862053] env[62619]: DEBUG nova.virt.hardware [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1155.862894] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c07a235d-49aa-4cfb-9eb9-1891be0e879c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.870782] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29258362-5dc2-44d3-ab02-2554d619d91e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1155.965111] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1155.965391] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1155.965631] env[62619]: INFO nova.compute.manager [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Attaching volume 383b4b3d-4244-4837-b511-1bb722f8224e to /dev/sdc [ 1155.997633] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4eded815-28f7-4f57-aefd-7f4ab9870946 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.004365] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1156.004595] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.130s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.005322] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c962f5e8-e84e-44ca-8008-6d95946891e3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1156.018094] env[62619]: DEBUG nova.virt.block_device [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updating existing volume attachment record: 17419f50-df3b-4565-855d-b1e0e03fc883 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1156.063833] env[62619]: DEBUG nova.compute.manager [req-d948f55c-faa6-4e2f-a71a-77e637129241 req-de37a28e-9e8d-405e-832a-d7ebdf794a00 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Received event network-vif-plugged-0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1156.064072] env[62619]: DEBUG oslo_concurrency.lockutils [req-d948f55c-faa6-4e2f-a71a-77e637129241 req-de37a28e-9e8d-405e-832a-d7ebdf794a00 service nova] Acquiring lock "95031e1a-e567-496f-ac3b-7d0121e487f4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1156.064293] env[62619]: DEBUG oslo_concurrency.lockutils [req-d948f55c-faa6-4e2f-a71a-77e637129241 req-de37a28e-9e8d-405e-832a-d7ebdf794a00 service nova] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1156.064460] env[62619]: DEBUG oslo_concurrency.lockutils [req-d948f55c-faa6-4e2f-a71a-77e637129241 req-de37a28e-9e8d-405e-832a-d7ebdf794a00 service nova] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1156.064631] env[62619]: DEBUG nova.compute.manager [req-d948f55c-faa6-4e2f-a71a-77e637129241 req-de37a28e-9e8d-405e-832a-d7ebdf794a00 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] No waiting events found dispatching network-vif-plugged-0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1156.064798] env[62619]: WARNING nova.compute.manager [req-d948f55c-faa6-4e2f-a71a-77e637129241 req-de37a28e-9e8d-405e-832a-d7ebdf794a00 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Received unexpected event network-vif-plugged-0049dc0f-0d62-42b6-9f0b-22f65b331c95 for instance with vm_state building and task_state spawning. 
[ 1156.151009] env[62619]: DEBUG nova.network.neutron [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Successfully updated port: 0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1156.649136] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1156.649290] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1156.649411] env[62619]: DEBUG nova.network.neutron [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1157.181776] env[62619]: DEBUG nova.network.neutron [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1157.300257] env[62619]: DEBUG nova.network.neutron [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updating instance_info_cache with network_info: [{"id": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "address": "fa:16:3e:71:b4:66", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0049dc0f-0d", "ovs_interfaceid": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1157.802976] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1157.803374] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Instance network_info: |[{"id": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "address": "fa:16:3e:71:b4:66", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0049dc0f-0d", "ovs_interfaceid": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1157.803862] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:b4:66', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '0049dc0f-0d62-42b6-9f0b-22f65b331c95', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1157.811286] env[62619]: DEBUG oslo.service.loopingcall [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1157.811498] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1157.811729] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2f6b7378-4527-468e-959d-85e49d51fecb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1157.831404] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1157.831404] env[62619]: value = "task-1365452" [ 1157.831404] env[62619]: _type = "Task" [ 1157.831404] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1157.838622] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365452, 'name': CreateVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.006479] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.006724] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1158.006920] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1158.119575] env[62619]: DEBUG nova.compute.manager [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Received event network-changed-0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1158.119784] env[62619]: DEBUG nova.compute.manager [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Refreshing instance network info cache due to event network-changed-0049dc0f-0d62-42b6-9f0b-22f65b331c95. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1158.120051] env[62619]: DEBUG oslo_concurrency.lockutils [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] Acquiring lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.120208] env[62619]: DEBUG oslo_concurrency.lockutils [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] Acquired lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.120374] env[62619]: DEBUG nova.network.neutron [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Refreshing network info cache for port 0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1158.341940] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365452, 'name': CreateVM_Task, 'duration_secs': 0.276083} completed successfully. 
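
The CreateVM_Task entries above ("Waiting for the task: ...", "progress is 0%.", "completed successfully") come from oslo.vmware's session layer: a SOAP call returns a task reference, and the session polls it until vCenter reports success or failure. A rough sketch of that call pattern with the public oslo.vmware API follows; the host, credentials, and the folder/spec/pool arguments are placeholders, not values from this deployment, and a reachable vCenter or ESX endpoint is assumed.

from oslo_vmware import api


def get_session(host, user, password):
    # Opens a vSphere API session; retry count and poll interval mirror
    # typical nova.conf [vmware] settings, chosen here as an assumption.
    return api.VMwareAPISession(host, user, password,
                                api_retry_count=10,
                                task_poll_interval=0.5)


def create_vm(session, vm_folder, config_spec, res_pool):
    # invoke_api() issues the SOAP call and returns a task moref;
    # wait_for_task() polls it (the "progress is N%." entries above) and
    # raises if vCenter marks the task as failed.
    task = session.invoke_api(session.vim, "CreateVM_Task", vm_folder,
                              config=config_spec, pool=res_pool)
    task_info = session.wait_for_task(task)
    return task_info.result  # managed object reference of the new VM
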
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.342323] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1158.342815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.343019] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.343380] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1158.343646] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-58ba55cd-6e48-4770-b9ad-2c10f918e2d9 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.348096] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1158.348096] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520874c8-2d4d-a9cd-8520-5fb121be7a64" [ 1158.348096] env[62619]: _type = "Task" [ 1158.348096] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.356899] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520874c8-2d4d-a9cd-8520-5fb121be7a64, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1158.816888] env[62619]: DEBUG nova.network.neutron [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updated VIF entry in instance network info cache for port 0049dc0f-0d62-42b6-9f0b-22f65b331c95. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1158.817298] env[62619]: DEBUG nova.network.neutron [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updating instance_info_cache with network_info: [{"id": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "address": "fa:16:3e:71:b4:66", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0049dc0f-0d", "ovs_interfaceid": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1158.858734] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]520874c8-2d4d-a9cd-8520-5fb121be7a64, 'name': SearchDatastore_Task, 'duration_secs': 0.009066} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.859406] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1158.859658] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1158.859893] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1158.860061] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1158.860246] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1158.860522] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-748b3e9c-dbea-4318-9d98-37347f8434e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.869258] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1158.869442] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1158.870194] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ead45fc3-f26b-4761-adaa-6a1829f9c8c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.876719] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1158.876719] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52be8eae-f6ec-393a-a15f-c3ad3fccdc6f" [ 1158.876719] env[62619]: _type = "Task" [ 1158.876719] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.887569] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52be8eae-f6ec-393a-a15f-c3ad3fccdc6f, 'name': SearchDatastore_Task} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1158.888332] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bf82cda3-de9b-44a9-a71f-ea37f459188e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1158.893620] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1158.893620] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5283c804-6fe2-e044-e905-ee9dffe73507" [ 1158.893620] env[62619]: _type = "Task" [ 1158.893620] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1158.900708] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5283c804-6fe2-e044-e905-ee9dffe73507, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.319663] env[62619]: DEBUG oslo_concurrency.lockutils [req-c0e23d2d-85eb-4324-9f46-4e0db2fb612e req-e0fe79b6-1f5d-49bb-9641-f2e2a5eb1219 service nova] Releasing lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.403965] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5283c804-6fe2-e044-e905-ee9dffe73507, 'name': SearchDatastore_Task, 'duration_secs': 0.008327} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1159.404377] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1159.404530] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copying Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 95031e1a-e567-496f-ac3b-7d0121e487f4/95031e1a-e567-496f-ac3b-7d0121e487f4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1159.405175] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8314c964-e1e9-45ce-bb03-9be4ff881958 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1159.411042] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1159.411042] env[62619]: value = "task-1365454" [ 1159.411042] env[62619]: _type = "Task" [ 1159.411042] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1159.419224] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365454, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1159.921384] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365454, 'name': CopyVirtualDisk_Task} progress is 89%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.018333] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Didn't find any instances for network info cache update. 
{{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1160.018693] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.018939] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.019235] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.019506] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.019718] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.019956] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1160.020180] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1160.422012] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365454, 'name': CopyVirtualDisk_Task} progress is 100%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.563139] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1160.563400] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290564', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'name': 'volume-383b4b3d-4244-4837-b511-1bb722f8224e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'serial': '383b4b3d-4244-4837-b511-1bb722f8224e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1160.564329] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-efd55108-2cec-43da-aad4-b0948985867d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.580055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78546fd8-7b6f-4d58-8cd4-36f7a2243dfa {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.607085] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfiguring VM instance instance-0000006a to attach disk [datastore1] volume-383b4b3d-4244-4837-b511-1bb722f8224e/volume-383b4b3d-4244-4837-b511-1bb722f8224e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1160.607316] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-469273f4-a017-47a4-93d0-7376bbcad7d2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1160.625079] env[62619]: DEBUG oslo_vmware.api [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1160.625079] env[62619]: value = "task-1365455" [ 1160.625079] env[62619]: _type = "Task" [ 1160.625079] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1160.632592] env[62619]: DEBUG oslo_vmware.api [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365455, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1160.924181] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365454, 'name': CopyVirtualDisk_Task} progress is 100%. 
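
The "Reconfiguring VM instance instance-0000006a to attach disk ... with type thin" entry above corresponds to a ReconfigVM_Task whose spec adds one VirtualDisk device backed by the existing volume VMDK. The sketch below builds and submits such a spec through an oslo.vmware session like the one sketched earlier; it is modeled on the vSphere API objects rather than copied from Nova, and the controller key, unit number, and -100 device key are illustrative.

def attach_vmdk_config_spec(client_factory, vmdk_path, controller_key,
                            unit_number):
    # One 'add' device change wrapping a thin-provisioned VirtualDisk whose
    # backing points at the already-existing volume-*.vmdk file.
    disk = client_factory.create('ns0:VirtualDisk')
    backing = client_factory.create('ns0:VirtualDiskFlatVer2BackingInfo')
    backing.fileName = vmdk_path
    backing.diskMode = 'persistent'
    backing.thinProvisioned = True
    disk.backing = backing
    disk.controllerKey = controller_key
    disk.unitNumber = unit_number
    disk.key = -100  # negative key: let vCenter assign one on reconfigure

    disk_spec = client_factory.create('ns0:VirtualDeviceConfigSpec')
    disk_spec.operation = 'add'
    disk_spec.device = disk

    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [disk_spec]
    return config_spec


def attach_volume_disk(session, vm_ref, vmdk_path, controller_key, unit_number):
    spec = attach_vmdk_config_spec(session.vim.client.factory, vmdk_path,
                                   controller_key, unit_number)
    task = session.invoke_api(session.vim, "ReconfigVM_Task", vm_ref,
                              spec=spec)
    session.wait_for_task(task)
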
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.134288] env[62619]: DEBUG oslo_vmware.api [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365455, 'name': ReconfigVM_Task, 'duration_secs': 0.392106} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.134564] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfigured VM instance instance-0000006a to attach disk [datastore1] volume-383b4b3d-4244-4837-b511-1bb722f8224e/volume-383b4b3d-4244-4837-b511-1bb722f8224e.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1161.139182] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-e05fbe9d-a7c2-49de-8dd2-23a6a3997588 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.155048] env[62619]: DEBUG oslo_vmware.api [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1161.155048] env[62619]: value = "task-1365456" [ 1161.155048] env[62619]: _type = "Task" [ 1161.155048] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.165265] env[62619]: DEBUG oslo_vmware.api [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365456, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.423895] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365454, 'name': CopyVirtualDisk_Task, 'duration_secs': 1.566284} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.424264] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copied Virtual Disk [datastore1] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore1] 95031e1a-e567-496f-ac3b-7d0121e487f4/95031e1a-e567-496f-ac3b-7d0121e487f4.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1161.424334] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1161.424595] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-de111c59-0989-4767-81ef-5e1362943ff5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.431279] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1161.431279] env[62619]: value = "task-1365457" [ 1161.431279] env[62619]: _type = "Task" [ 1161.431279] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.439371] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365457, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1161.664313] env[62619]: DEBUG oslo_vmware.api [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365456, 'name': ReconfigVM_Task, 'duration_secs': 0.129875} completed successfully. 
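
The "Copying Virtual Disk ..." and "Extending root virtual disk to 1048576" entries both target the vCenter-wide VirtualDiskManager. A condensed sketch of the two calls, again assuming an oslo.vmware session as above; the datacenter reference, datastore paths, and size are placeholders.

def copy_and_extend_root_disk(session, dc_ref, src_path, dst_path, new_size_kb):
    # Both operations return tasks that are polled exactly like the
    # CopyVirtualDisk_Task / ExtendVirtualDisk_Task entries above.
    disk_mgr = session.vim.service_content.virtualDiskManager

    copy_task = session.invoke_api(
        session.vim, "CopyVirtualDisk_Task", disk_mgr,
        sourceName=src_path, sourceDatacenter=dc_ref,
        destName=dst_path, destDatacenter=dc_ref)
    session.wait_for_task(copy_task)

    extend_task = session.invoke_api(
        session.vim, "ExtendVirtualDisk_Task", disk_mgr,
        name=dst_path, datacenter=dc_ref,
        newCapacityKb=new_size_kb, eagerZero=False)
    session.wait_for_task(extend_task)
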
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.664685] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290564', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'name': 'volume-383b4b3d-4244-4837-b511-1bb722f8224e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'serial': '383b4b3d-4244-4837-b511-1bb722f8224e'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1161.941356] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365457, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.054243} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1161.941601] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1161.942362] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd1581d-3877-463b-820a-bfc3ef2e75a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.963159] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Reconfiguring VM instance instance-0000006b to attach disk [datastore1] 95031e1a-e567-496f-ac3b-7d0121e487f4/95031e1a-e567-496f-ac3b-7d0121e487f4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1161.963392] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-6a9597b4-a64f-4d93-a756-09babbb52f7e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1161.981899] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1161.981899] env[62619]: value = "task-1365458" [ 1161.981899] env[62619]: _type = "Task" [ 1161.981899] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1161.989142] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365458, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.491199] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365458, 'name': ReconfigVM_Task, 'duration_secs': 0.23895} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1162.491627] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Reconfigured VM instance instance-0000006b to attach disk [datastore1] 95031e1a-e567-496f-ac3b-7d0121e487f4/95031e1a-e567-496f-ac3b-7d0121e487f4.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1162.492056] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-fa241704-23e0-4678-900c-7e9893061a75 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1162.498240] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1162.498240] env[62619]: value = "task-1365459" [ 1162.498240] env[62619]: _type = "Task" [ 1162.498240] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1162.510545] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365459, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1162.702940] env[62619]: DEBUG nova.objects.instance [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1163.008492] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365459, 'name': Rename_Task, 'duration_secs': 0.123214} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.008775] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1163.009118] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-b176f61b-6654-4c99-a26e-7957362c7a7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.015260] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1163.015260] env[62619]: value = "task-1365460" [ 1163.015260] env[62619]: _type = "Task" [ 1163.015260] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1163.022438] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365460, 'name': PowerOnVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1163.208837] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ac5397bc-9086-480c-a800-31c408eb8676 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 7.243s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1163.496094] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1163.496470] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1163.524648] env[62619]: DEBUG oslo_vmware.api [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365460, 'name': PowerOnVM_Task, 'duration_secs': 0.41141} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1163.524894] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1163.525113] env[62619]: INFO nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Took 7.69 seconds to spawn the instance on the hypervisor. [ 1163.525305] env[62619]: DEBUG nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1163.526142] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0b6dba5-16e0-49e3-8b4f-07722c781ab1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1163.999910] env[62619]: INFO nova.compute.manager [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Detaching volume e50f943a-d0e0-4992-bcd6-2f3a753eb8f1 [ 1164.030196] env[62619]: INFO nova.virt.block_device [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Attempting to driver detach volume e50f943a-d0e0-4992-bcd6-2f3a753eb8f1 from mountpoint /dev/sdb [ 1164.030196] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1164.030196] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290563', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'name': 'volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'serial': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1164.030758] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-804dd9e2-cf24-4006-a529-f8a7ab0280c7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.059548] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7d39e62-f423-4f61-b3a6-22c36b1940c2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.062263] env[62619]: INFO nova.compute.manager [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Took 12.38 seconds to build instance. [ 1164.067467] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47fefeec-e38f-4289-925c-7fb117e95e1f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.091367] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-73daa58f-54eb-4be4-a79e-41b37ef6ce8c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.105723] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] The volume has not been displaced from its original location: [datastore1] volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1/volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1164.110718] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfiguring VM instance instance-0000006a to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1164.111198] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c6339abd-e4cc-4146-a58e-ccf5d393fef8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.128285] env[62619]: DEBUG oslo_vmware.api [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1164.128285] env[62619]: value = "task-1365461" [ 1164.128285] env[62619]: _type = "Task" [ 1164.128285] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.135242] env[62619]: DEBUG oslo_vmware.api [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365461, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.565142] env[62619]: DEBUG oslo_concurrency.lockutils [None req-d51c435a-5265-491f-b8aa-04c4ad1ed895 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.893s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1164.637893] env[62619]: DEBUG oslo_vmware.api [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365461, 'name': ReconfigVM_Task, 'duration_secs': 0.233621} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1164.638186] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfigured VM instance instance-0000006a to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1164.642599] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-b5451740-7113-42c9-8355-bd57a92be5a5 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1164.659420] env[62619]: DEBUG oslo_vmware.api [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1164.659420] env[62619]: value = "task-1365462" [ 1164.659420] env[62619]: _type = "Task" [ 1164.659420] env[62619]: } to complete. 
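
The detach path ("Reconfiguring VM instance instance-0000006a to detach disk 2001") is the mirror image of the attach sketch above: a ReconfigVM_Task whose device change uses operation 'remove' on the already-attached VirtualDisk. A minimal sketch follows; locating the VirtualDisk object (the log's "disk 2001" is its device key) via the VM's config.hardware.device list is assumed and omitted.

def detach_disk(session, vm_ref, disk_device):
    # Removing the device from the VM without a fileOperation leaves the
    # backing VMDK in place on the datastore, which is what a volume detach
    # needs.
    client_factory = session.vim.client.factory
    disk_spec = client_factory.create('ns0:VirtualDeviceConfigSpec')
    disk_spec.operation = 'remove'
    disk_spec.device = disk_device

    config_spec = client_factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.deviceChange = [disk_spec]
    task = session.invoke_api(session.vim, "ReconfigVM_Task", vm_ref,
                              spec=config_spec)
    session.wait_for_task(task)
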
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1164.667388] env[62619]: DEBUG oslo_vmware.api [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365462, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1164.753980] env[62619]: DEBUG nova.compute.manager [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Received event network-changed-0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1164.754204] env[62619]: DEBUG nova.compute.manager [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Refreshing instance network info cache due to event network-changed-0049dc0f-0d62-42b6-9f0b-22f65b331c95. {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1164.754479] env[62619]: DEBUG oslo_concurrency.lockutils [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] Acquiring lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1164.754610] env[62619]: DEBUG oslo_concurrency.lockutils [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] Acquired lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1164.754829] env[62619]: DEBUG nova.network.neutron [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Refreshing network info cache for port 0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1165.169086] env[62619]: DEBUG oslo_vmware.api [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365462, 'name': ReconfigVM_Task, 'duration_secs': 0.129957} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1165.169322] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290563', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'name': 'volume-e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1', 'serial': 'e50f943a-d0e0-4992-bcd6-2f3a753eb8f1'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1165.459165] env[62619]: DEBUG nova.network.neutron [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updated VIF entry in instance network info cache for port 0049dc0f-0d62-42b6-9f0b-22f65b331c95. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1165.459614] env[62619]: DEBUG nova.network.neutron [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updating instance_info_cache with network_info: [{"id": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "address": "fa:16:3e:71:b4:66", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.153", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap0049dc0f-0d", "ovs_interfaceid": "0049dc0f-0d62-42b6-9f0b-22f65b331c95", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1165.710063] env[62619]: DEBUG nova.objects.instance [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1165.961936] env[62619]: DEBUG oslo_concurrency.lockutils [req-1de55ea2-6276-4caa-9fc9-f654d1fcac5a req-d8b9d569-880f-4fe3-a205-c483d3755f57 service nova] Releasing lock "refresh_cache-95031e1a-e567-496f-ac3b-7d0121e487f4" {{(pid=62619) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1166.717291] env[62619]: DEBUG oslo_concurrency.lockutils [None req-2ccdb72a-9566-4842-a24a-220f863e84d6 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.221s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1166.752715] env[62619]: DEBUG oslo_concurrency.lockutils [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1166.752972] env[62619]: DEBUG oslo_concurrency.lockutils [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1167.256273] env[62619]: INFO nova.compute.manager [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Detaching volume 383b4b3d-4244-4837-b511-1bb722f8224e [ 1167.288808] env[62619]: INFO nova.virt.block_device [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Attempting to driver detach volume 383b4b3d-4244-4837-b511-1bb722f8224e from mountpoint /dev/sdc [ 1167.289181] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Volume detach. 
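Annotation: the repeated "Acquiring lock ... / acquired ... / released ... held N.NNNs" triplets around do_detach_volume come from oslo.concurrency's synchronized decorator wrapping a nested function keyed on the instance UUID. A simplified sketch of that shape (not Nova's actual code; the lock name is just an instance UUID passed in by the caller):

    from oslo_concurrency import lockutils

    def detach_volume(instance_uuid, driver_detach):
        # The decorator's `inner` wrapper emits the acquire/held/released
        # lines referenced above (lockutils.py:402/407/421).
        @lockutils.synchronized(instance_uuid)
        def do_detach_volume():
            return driver_detach()
        return do_detach_volume()
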
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1167.289495] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290564', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'name': 'volume-383b4b3d-4244-4837-b511-1bb722f8224e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'serial': '383b4b3d-4244-4837-b511-1bb722f8224e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1167.291308] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f82f5db-b1b5-4100-a60f-0ef661e00a8b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.319441] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08eda84e-14da-40b8-b57d-f9daf6e38231 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.326653] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2e97023-1da6-4cc0-88ba-45f5b3cbfcdd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.345755] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e73d3895-7b58-4c77-9b72-f73ac3295062 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.359731] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] The volume has not been displaced from its original location: [datastore1] volume-383b4b3d-4244-4837-b511-1bb722f8224e/volume-383b4b3d-4244-4837-b511-1bb722f8224e.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1167.364650] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfiguring VM instance instance-0000006a to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1167.364911] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-87c2d3ea-ae6b-43de-9772-e43ed0c76ec7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.381778] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1167.381778] env[62619]: value = "task-1365463" [ 1167.381778] env[62619]: _type = "Task" [ 1167.381778] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.389197] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365463, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1167.891611] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365463, 'name': ReconfigVM_Task, 'duration_secs': 0.221846} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1167.891984] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Reconfigured VM instance instance-0000006a to detach disk 2002 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1167.896529] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-bce72b64-93c4-4b6f-9afc-14aa88148bcf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1167.911060] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1167.911060] env[62619]: value = "task-1365464" [ 1167.911060] env[62619]: _type = "Task" [ 1167.911060] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1167.918746] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365464, 'name': ReconfigVM_Task} progress is 5%. 
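Annotation: "Reconfiguring VM instance ... to detach disk 2002" means removing the volume's VirtualDisk device from the VM while leaving the backing VMDK on the datastore. A rough equivalent using pyVmomi rather than the suds-based client driven here; vm and disk_key are placeholders:

    from pyVmomi import vim

    def detach_disk(vm, disk_key):
        # Locate the virtual disk whose device key matches the one in the
        # log (2001, 2002, ...).
        disk = next(d for d in vm.config.hardware.device
                    if isinstance(d, vim.vm.device.VirtualDisk)
                    and d.key == disk_key)
        # 'remove' with no fileOperation detaches the device but keeps the
        # backing .vmdk, which is what a Cinder volume detach needs.
        change = vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.remove,
            device=disk)
        return vm.ReconfigVM_Task(spec=vim.vm.ConfigSpec(deviceChange=[change]))
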
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.422878] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365464, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1168.922072] env[62619]: DEBUG oslo_vmware.api [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365464, 'name': ReconfigVM_Task, 'duration_secs': 0.752412} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1168.922508] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290564', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'name': 'volume-383b4b3d-4244-4837-b511-1bb722f8224e', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '072a9ba2-95d0-40c3-b323-21130df616f6', 'attached_at': '', 'detached_at': '', 'volume_id': '383b4b3d-4244-4837-b511-1bb722f8224e', 'serial': '383b4b3d-4244-4837-b511-1bb722f8224e'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1169.461599] env[62619]: DEBUG nova.objects.instance [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'flavor' on Instance uuid 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1170.470459] env[62619]: DEBUG oslo_concurrency.lockutils [None req-70be8538-83df-46ad-a0b6-f414a096b802 tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.717s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.129387] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.129734] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.130018] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d 
tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "072a9ba2-95d0-40c3-b323-21130df616f6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1171.130261] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1171.130465] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1171.132706] env[62619]: INFO nova.compute.manager [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Terminating instance [ 1171.134563] env[62619]: DEBUG nova.compute.manager [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1171.134764] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1171.135616] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a5c3495-851b-414d-be79-1c3140d4a265 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.143860] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1171.144115] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-baa18a3d-6846-4d45-8303-b86a829f7f21 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.150757] env[62619]: DEBUG oslo_vmware.api [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1171.150757] env[62619]: value = "task-1365465" [ 1171.150757] env[62619]: _type = "Task" [ 1171.150757] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.159622] env[62619]: DEBUG oslo_vmware.api [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365465, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1171.660305] env[62619]: DEBUG oslo_vmware.api [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365465, 'name': PowerOffVM_Task, 'duration_secs': 0.181491} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1171.660664] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1171.660762] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1171.660979] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5cccfd74-4fd8-4913-b817-a332248021e4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.729523] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1171.729752] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1171.729924] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Deleting the datastore file [datastore2] 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1171.730217] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-87437d64-7a1c-4337-bf26-3371fc742feb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1171.737787] env[62619]: DEBUG oslo_vmware.api [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for the task: (returnval){ [ 1171.737787] env[62619]: value = "task-1365467" [ 1171.737787] env[62619]: _type = "Task" [ 1171.737787] 
env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1171.744951] env[62619]: DEBUG oslo_vmware.api [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365467, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1172.248017] env[62619]: DEBUG oslo_vmware.api [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Task: {'id': task-1365467, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.14211} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1172.248344] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1172.248536] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1172.248720] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1172.248899] env[62619]: INFO nova.compute.manager [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Took 1.11 seconds to destroy the instance on the hypervisor. [ 1172.249219] env[62619]: DEBUG oslo.service.loopingcall [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
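Annotation: the terminate path above is a fixed sequence — power the VM off, unregister it, then delete its directory from the datastore. A pyVmomi-style sketch of the same sequence; wait_for() stands in for task polling and all object references are placeholders:

    from pyVmomi import vim

    def destroy_on_hypervisor(content, vm, datacenter, wait_for):
        # e.g. '[datastore2] 072a9ba2-95d0-40c3-b323-21130df616f6'
        ds_dir = vm.config.files.vmPathName.rsplit('/', 1)[0]
        if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
            wait_for(vm.PowerOffVM_Task())       # PowerOffVM_Task above
        vm.UnregisterVM()                        # UnregisterVM (no task returned)
        wait_for(content.fileManager.DeleteDatastoreFile_Task(
            name=ds_dir, datacenter=datacenter)) # DeleteDatastoreFile_Task above
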
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1172.249445] env[62619]: DEBUG nova.compute.manager [-] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1172.249540] env[62619]: DEBUG nova.network.neutron [-] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1172.678860] env[62619]: DEBUG nova.compute.manager [req-5b5d81a3-3d8d-4fb8-8808-a251ac76695a req-2d81a983-b050-45a2-a1ca-d0b96c5ba564 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Received event network-vif-deleted-15698219-89e4-4caa-a849-1b49d8beb863 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1172.679122] env[62619]: INFO nova.compute.manager [req-5b5d81a3-3d8d-4fb8-8808-a251ac76695a req-2d81a983-b050-45a2-a1ca-d0b96c5ba564 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Neutron deleted interface 15698219-89e4-4caa-a849-1b49d8beb863; detaching it from the instance and deleting it from the info cache [ 1172.679321] env[62619]: DEBUG nova.network.neutron [req-5b5d81a3-3d8d-4fb8-8808-a251ac76695a req-2d81a983-b050-45a2-a1ca-d0b96c5ba564 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.158644] env[62619]: DEBUG nova.network.neutron [-] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1173.182164] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-1c7d012b-79dc-4786-9c25-2fe5ac4f666e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.192093] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-040dcfb6-cb4e-47f9-aa4a-0aee9117a4ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1173.216230] env[62619]: DEBUG nova.compute.manager [req-5b5d81a3-3d8d-4fb8-8808-a251ac76695a req-2d81a983-b050-45a2-a1ca-d0b96c5ba564 service nova] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Detach interface failed, port_id=15698219-89e4-4caa-a849-1b49d8beb863, reason: Instance 072a9ba2-95d0-40c3-b323-21130df616f6 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1173.662165] env[62619]: INFO nova.compute.manager [-] [instance: 072a9ba2-95d0-40c3-b323-21130df616f6] Took 1.41 seconds to deallocate network for instance. 
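Annotation: the instance_info_cache dumps above (the populated entry for port 0049dc0f-0d62-42b6-9f0b-22f65b331c95 earlier, and the empty list once the network is deallocated) are plain JSON-serialisable structures. A small helper, assuming the cached value has already been loaded into ordinary lists/dicts, that pulls the fixed/floating address pairs out of such an entry:

    def addresses_from_cache(network_info):
        """Yield (fixed_ip, floating_ip_or_None) pairs from a cached entry."""
        for vif in network_info:
            for subnet in vif['network']['subnets']:
                for ip in subnet['ips']:
                    floats = ip.get('floating_ips') or [{'address': None}]
                    for fip in floats:
                        yield ip['address'], fip['address']

    # For the populated entry above this yields
    # ('192.168.128.10', '10.180.180.153'); for the emptied cache ([]) nothing.
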
[ 1174.168818] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1174.169201] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1174.169333] env[62619]: DEBUG nova.objects.instance [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lazy-loading 'resources' on Instance uuid 072a9ba2-95d0-40c3-b323-21130df616f6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1174.712598] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb695afa-1d50-4408-b39d-3d5fe09530b0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.719961] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a026ad8-0f57-432e-a8f2-77fcf6789c4b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.749153] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04a8a9f5-f064-4925-b313-31087122d006 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.755819] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a053f2bb-36d1-4aff-af84-7052f665d379 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1174.768434] env[62619]: DEBUG nova.compute.provider_tree [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1175.271291] env[62619]: DEBUG nova.scheduler.client.report [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1175.778082] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 
tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.609s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1175.799938] env[62619]: INFO nova.scheduler.client.report [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Deleted allocations for instance 072a9ba2-95d0-40c3-b323-21130df616f6 [ 1176.308815] env[62619]: DEBUG oslo_concurrency.lockutils [None req-ae5ed077-e07f-4973-a0de-781e0a6f135d tempest-AttachVolumeTestJSON-1743018911 tempest-AttachVolumeTestJSON-1743018911-project-member] Lock "072a9ba2-95d0-40c3-b323-21130df616f6" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.179s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1203.532025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "95031e1a-e567-496f-ac3b-7d0121e487f4" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1203.532025] env[62619]: DEBUG oslo_concurrency.lockutils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1204.035883] env[62619]: DEBUG nova.compute.utils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1204.538397] env[62619]: DEBUG oslo_concurrency.lockutils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1205.596618] env[62619]: DEBUG oslo_concurrency.lockutils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "95031e1a-e567-496f-ac3b-7d0121e487f4" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1205.596988] env[62619]: DEBUG oslo_concurrency.lockutils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.001s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1205.597189] env[62619]: INFO nova.compute.manager [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Attaching volume 11868543-b259-4b58-bf45-955a53cf23b7 to /dev/sdb [ 1205.627012] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eb9bcac-d456-4332-a66a-0d8ab2d77ae8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.633717] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d1ce30a9-d2dc-4ec7-a6cf-21fe90d6b83e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1205.646607] env[62619]: DEBUG nova.virt.block_device [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updating existing volume attachment record: e94d8cc4-e1e6-47ef-8acb-5ffdb9f0ba11 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1210.188744] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Volume attach. Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1210.189069] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290566', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'name': 'volume-11868543-b259-4b58-bf45-955a53cf23b7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95031e1a-e567-496f-ac3b-7d0121e487f4', 'attached_at': '', 'detached_at': '', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'serial': '11868543-b259-4b58-bf45-955a53cf23b7'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1210.189946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a166e224-8934-44d2-92a7-74b083c4be4d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.207055] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1394e612-b0ff-4606-ab2d-c1531fa59c5d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.229964] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Reconfiguring VM instance instance-0000006b to attach disk [datastore2] 
volume-11868543-b259-4b58-bf45-955a53cf23b7/volume-11868543-b259-4b58-bf45-955a53cf23b7.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1210.230216] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3559a78c-7827-400d-b86d-bcba25ad133a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1210.246911] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1210.246911] env[62619]: value = "task-1365473" [ 1210.246911] env[62619]: _type = "Task" [ 1210.246911] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1210.254191] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365473, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1210.756505] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365473, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.257487] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365473, 'name': ReconfigVM_Task} progress is 14%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1211.758172] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365473, 'name': ReconfigVM_Task, 'duration_secs': 1.242392} completed successfully. 
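Annotation: the attach side ("Reconfiguring VM instance instance-0000006b to attach disk [datastore2] volume-.../....vmdk ... with type thin") is the mirror image: a device-change spec that adds a VirtualDisk pointing at the existing Cinder-owned VMDK. A hedged pyVmomi sketch; controller/unit selection and fields such as the disk key are simplified placeholders and may need adjusting per vCenter version:

    from pyVmomi import vim

    def attach_existing_vmdk(vm, vmdk_path, unit_number):
        controller = next(d for d in vm.config.hardware.device
                          if isinstance(d, vim.vm.device.VirtualSCSIController))
        backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo(
            fileName=vmdk_path,        # '[datastore2] volume-.../volume-....vmdk'
            diskMode='persistent',
            thinProvisioned=True)      # the "with type thin" in the log
        disk = vim.vm.device.VirtualDisk(
            key=-101, backing=backing,
            controllerKey=controller.key, unitNumber=unit_number)
        change = vim.vm.device.VirtualDeviceSpec(
            operation=vim.vm.device.VirtualDeviceSpec.Operation.add,
            device=disk)
        return vm.ReconfigVM_Task(spec=vim.vm.ConfigSpec(deviceChange=[change]))
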
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1211.758489] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Reconfigured VM instance instance-0000006b to attach disk [datastore2] volume-11868543-b259-4b58-bf45-955a53cf23b7/volume-11868543-b259-4b58-bf45-955a53cf23b7.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1211.763193] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-12088bc7-fb7f-4ae2-a9f6-ddcf2764ebae {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.778590] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1211.778590] env[62619]: value = "task-1365474" [ 1211.778590] env[62619]: _type = "Task" [ 1211.778590] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1211.786413] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365474, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1212.288027] env[62619]: DEBUG oslo_vmware.api [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365474, 'name': ReconfigVM_Task, 'duration_secs': 0.122436} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1212.288405] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290566', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'name': 'volume-11868543-b259-4b58-bf45-955a53cf23b7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95031e1a-e567-496f-ac3b-7d0121e487f4', 'attached_at': '', 'detached_at': '', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'serial': '11868543-b259-4b58-bf45-955a53cf23b7'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1213.323589] env[62619]: DEBUG nova.objects.instance [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'flavor' on Instance uuid 95031e1a-e567-496f-ac3b-7d0121e487f4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1213.531562] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.531867] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.532075] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.532230] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... 
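Annotation: the "Running periodic task ComputeManager._poll_volume_usage / _reclaim_queued_deletes / update_available_resource" records come from oslo.service's periodic-task machinery. A minimal sketch of how such tasks are declared, assuming a conf object is available (illustrative only, not the ComputeManager itself):

    from oslo_service import periodic_task

    class Manager(periodic_task.PeriodicTasks):
        def __init__(self, conf):
            super().__init__(conf)

        @periodic_task.periodic_task(spacing=60)
        def _poll_volume_usage(self, context):
            # Invoked from run_periodic_tasks(), which emits the
            # "Running periodic task ..." lines seen above.
            pass

    # The service loop drives it with: manager.run_periodic_tasks(context)
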
{{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1213.532388] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1213.827905] env[62619]: DEBUG oslo_concurrency.lockutils [None req-868963a6-d319-4e4d-9517-21eb292554a5 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.231s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.035202] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.035457] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.035622] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1214.035798] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1214.036744] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf0bbd7c-923c-4cf8-86ad-74f0c8761cda {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.045089] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f20fc5f6-0b8f-4df1-a825-7489fb709bf1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.058571] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d22818ab-dab3-40a4-b0f6-aa488d35fcf3 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.064557] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79533bd5-9005-44b3-98fb-79f8ecd671b6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.092135] env[62619]: DEBUG oslo_concurrency.lockutils [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock 
"95031e1a-e567-496f-ac3b-7d0121e487f4" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.092326] env[62619]: DEBUG oslo_concurrency.lockutils [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.093865] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181061MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1214.094010] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1214.094196] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1214.597459] env[62619]: INFO nova.compute.manager [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Detaching volume 11868543-b259-4b58-bf45-955a53cf23b7 [ 1214.628662] env[62619]: INFO nova.virt.block_device [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Attempting to driver detach volume 11868543-b259-4b58-bf45-955a53cf23b7 from mountpoint /dev/sdb [ 1214.628909] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Volume detach. 
Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1214.629114] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290566', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'name': 'volume-11868543-b259-4b58-bf45-955a53cf23b7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95031e1a-e567-496f-ac3b-7d0121e487f4', 'attached_at': '', 'detached_at': '', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'serial': '11868543-b259-4b58-bf45-955a53cf23b7'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1214.629976] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb9c9a63-021f-407e-8328-d28860738362 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.653109] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a07b191-b082-416b-8765-8d96023e8134 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.660283] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfaf3da7-25a1-43de-8cfb-58c08dc987bf {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.682077] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06f00028-b91e-476b-aaa9-484c0269eb9e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.696793] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] The volume has not been displaced from its original location: [datastore2] volume-11868543-b259-4b58-bf45-955a53cf23b7/volume-11868543-b259-4b58-bf45-955a53cf23b7.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1214.701837] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Reconfiguring VM instance instance-0000006b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1214.702139] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-c7effb3e-d928-40d0-b42c-2f1c311066e6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1214.718908] env[62619]: DEBUG oslo_vmware.api [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1214.718908] env[62619]: value = "task-1365475" [ 1214.718908] env[62619]: _type = "Task" [ 1214.718908] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1214.726232] env[62619]: DEBUG oslo_vmware.api [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365475, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.137863] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 95031e1a-e567-496f-ac3b-7d0121e487f4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1215.138085] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1215.138235] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1215.164263] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb94cfe8-7baf-465b-99fe-913f2a3551cc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.171580] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c42c4743-d989-4cb4-a06d-b1441a0d384c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.200096] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34a962f3-fef2-4a61-9b46-6ba845c6e749 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.206602] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec546f3e-1e20-499f-a25b-08138a1c6d70 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.220415] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1215.228416] env[62619]: DEBUG oslo_vmware.api [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365475, 'name': ReconfigVM_Task, 'duration_secs': 0.246077} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.229180] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Reconfigured VM instance instance-0000006b to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1215.233676] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a44ace2a-08a2-4480-91cf-533404cfe62b {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1215.248226] env[62619]: DEBUG oslo_vmware.api [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1215.248226] env[62619]: value = "task-1365476" [ 1215.248226] env[62619]: _type = "Task" [ 1215.248226] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1215.255470] env[62619]: DEBUG oslo_vmware.api [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365476, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1215.724958] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1215.757409] env[62619]: DEBUG oslo_vmware.api [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365476, 'name': ReconfigVM_Task, 'duration_secs': 0.137034} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1215.757702] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290566', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'name': 'volume-11868543-b259-4b58-bf45-955a53cf23b7', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '95031e1a-e567-496f-ac3b-7d0121e487f4', 'attached_at': '', 'detached_at': '', 'volume_id': '11868543-b259-4b58-bf45-955a53cf23b7', 'serial': '11868543-b259-4b58-bf45-955a53cf23b7'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1216.229776] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1216.230032] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.136s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1216.296643] env[62619]: DEBUG nova.objects.instance [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'flavor' on Instance uuid 95031e1a-e567-496f-ac3b-7d0121e487f4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1217.303579] env[62619]: DEBUG oslo_concurrency.lockutils [None req-21b760f1-768e-4d12-9be9-663926936008 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.211s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.306392] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "95031e1a-e567-496f-ac3b-7d0121e487f4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.306776] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.306932] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] 
Acquiring lock "95031e1a-e567-496f-ac3b-7d0121e487f4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1218.307151] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1218.307333] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1218.309811] env[62619]: INFO nova.compute.manager [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Terminating instance [ 1218.311523] env[62619]: DEBUG nova.compute.manager [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1218.311727] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1218.312567] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a826a57-6ff4-4fc0-bd9c-9b78ec76cf4a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.320488] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1218.320983] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-914b95ac-077a-43e5-b43e-9ac9297e56fb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.327259] env[62619]: DEBUG oslo_vmware.api [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1218.327259] env[62619]: value = "task-1365477" [ 1218.327259] env[62619]: _type = "Task" [ 1218.327259] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.334737] env[62619]: DEBUG oslo_vmware.api [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365477, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1218.841620] env[62619]: DEBUG oslo_vmware.api [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365477, 'name': PowerOffVM_Task, 'duration_secs': 0.200464} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1218.841955] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1218.842270] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1218.843022] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c37d1cbc-45af-42cc-aa7c-c750f302545a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.937210] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1218.937507] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Deleting contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1218.937791] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleting the datastore file [datastore1] 95031e1a-e567-496f-ac3b-7d0121e487f4 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1218.938215] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1fc97703-f6ac-47c6-8d9f-887530eca2ef {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1218.946714] env[62619]: DEBUG oslo_vmware.api [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1218.946714] env[62619]: value = "task-1365479" [ 1218.946714] 
env[62619]: _type = "Task" [ 1218.946714] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1218.960411] env[62619]: DEBUG oslo_vmware.api [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365479, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1219.457113] env[62619]: DEBUG oslo_vmware.api [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365479, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.146801} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1219.457480] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1219.457662] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Deleted contents of the VM from datastore datastore1 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1219.457914] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1219.458177] env[62619]: INFO nova.compute.manager [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Took 1.15 seconds to destroy the instance on the hypervisor. [ 1219.458472] env[62619]: DEBUG oslo.service.loopingcall [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1219.458710] env[62619]: DEBUG nova.compute.manager [-] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1219.458833] env[62619]: DEBUG nova.network.neutron [-] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1220.136683] env[62619]: DEBUG nova.compute.manager [req-7812c604-de85-4418-8583-c3280dc90085 req-c5151fee-5e2d-47db-974b-7d2d252ed450 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Received event network-vif-deleted-0049dc0f-0d62-42b6-9f0b-22f65b331c95 {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1220.136880] env[62619]: INFO nova.compute.manager [req-7812c604-de85-4418-8583-c3280dc90085 req-c5151fee-5e2d-47db-974b-7d2d252ed450 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Neutron deleted interface 0049dc0f-0d62-42b6-9f0b-22f65b331c95; detaching it from the instance and deleting it from the info cache [ 1220.137043] env[62619]: DEBUG nova.network.neutron [req-7812c604-de85-4418-8583-c3280dc90085 req-c5151fee-5e2d-47db-974b-7d2d252ed450 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.224175] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.224373] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.615811] env[62619]: DEBUG nova.network.neutron [-] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1220.639157] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-49b30a1f-9a59-4df4-af33-fca1406c0325 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.650568] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa557373-cd96-4b95-9f90-0599024078a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1220.676903] env[62619]: DEBUG nova.compute.manager [req-7812c604-de85-4418-8583-c3280dc90085 req-c5151fee-5e2d-47db-974b-7d2d252ed450 service nova] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Detach interface failed, port_id=0049dc0f-0d62-42b6-9f0b-22f65b331c95, reason: Instance 95031e1a-e567-496f-ac3b-7d0121e487f4 could not be found. 
{{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1220.728896] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1220.729164] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1220.729228] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1221.119117] env[62619]: INFO nova.compute.manager [-] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Took 1.66 seconds to deallocate network for instance. [ 1221.231829] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 95031e1a-e567-496f-ac3b-7d0121e487f4] Skipping network cache update for instance because it is being deleted. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9945}} [ 1221.231988] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Didn't find any instances for network info cache update. {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:10014}} [ 1221.232204] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.232370] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.232522] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1221.626115] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1221.626477] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1221.626711] env[62619]: DEBUG nova.objects.instance [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] 
Lazy-loading 'resources' on Instance uuid 95031e1a-e567-496f-ac3b-7d0121e487f4 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1222.161828] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb884726-90ee-4e9c-8302-2d41475b13d8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.169068] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b7ee9e2-b654-4f22-a907-32c77ba1dc53 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.198807] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b32950-9d28-4304-a28d-dbb7dfd22819 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.205946] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63473e63-fa37-474a-af8d-7306d85a3e5f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1222.218767] env[62619]: DEBUG nova.compute.provider_tree [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1222.722300] env[62619]: DEBUG nova.scheduler.client.report [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1223.227451] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.601s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1223.249240] env[62619]: INFO nova.scheduler.client.report [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted allocations for instance 95031e1a-e567-496f-ac3b-7d0121e487f4 [ 1223.757899] env[62619]: DEBUG oslo_concurrency.lockutils [None req-666526d5-45ac-4ce4-a49f-ee85ee208f4d tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "95031e1a-e567-496f-ac3b-7d0121e487f4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 5.451s {{(pid=62619) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1226.166060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1226.166060] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1226.667924] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Starting instance... {{(pid=62619) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2432}} [ 1227.189057] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1227.189057] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1227.190294] env[62619]: INFO nova.compute.claims [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1228.222754] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89c26b39-5eaa-4725-b606-0c8209ec932f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.230512] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-937b1b20-dea5-4d9e-a655-30b05611a521 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.259487] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-626be1d4-e6f4-42a5-bd87-bc3e3454340c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.266118] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f54c1c9d-1fbf-45aa-98ce-4b81a09bcf17 
{{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1228.278454] env[62619]: DEBUG nova.compute.provider_tree [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1228.781217] env[62619]: DEBUG nova.scheduler.client.report [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1229.288023] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 2.099s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1229.288646] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Start building networks asynchronously for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2829}} [ 1229.794142] env[62619]: DEBUG nova.compute.utils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1229.795607] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Allocating IP information in the background. 
{{(pid=62619) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1981}} [ 1229.795757] env[62619]: DEBUG nova.network.neutron [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] allocate_for_instance() {{(pid=62619) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1229.845440] env[62619]: DEBUG nova.policy [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '34a14400ec56409ca356b449a9e30cf7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '60ccdf8f256c427b9767a01dad0616fd', 'project_domain_id': 'default', 'roles': ['reader', 'member'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=62619) authorize /opt/stack/nova/nova/policy.py:201}} [ 1230.114030] env[62619]: DEBUG nova.network.neutron [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Successfully created port: 8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1230.299092] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Start building block device mappings for instance. {{(pid=62619) _build_resources /opt/stack/nova/nova/compute/manager.py:2864}} [ 1231.309209] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Start spawning the instance on the hypervisor. 
{{(pid=62619) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2638}} [ 1231.334896] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Getting desirable topologies for flavor Flavor(created_at=2024-10-25T16:57:48Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=192,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2024-10-25T16:57:31Z,direct_url=,disk_format='vmdk',id=a3c7be48-0721-419b-bbd6-8b4cc36c5604,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='472d4a4483f8413184d68333cfe463c0',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2024-10-25T16:57:31Z,virtual_size=,visibility=), allow threads: False {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1231.335190] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1231.335392] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image limits 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1231.335590] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Flavor pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1231.335732] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Image pref 0:0:0 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1231.335874] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=62619) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1231.336096] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1231.336264] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1231.336487] env[62619]: DEBUG 
nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Got 1 possible topologies {{(pid=62619) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1231.336610] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1231.336800] env[62619]: DEBUG nova.virt.hardware [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=62619) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1231.337696] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afb8186e-e0d5-4306-8374-9a5be8606b00 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.345460] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da739ea8-33eb-4956-9cf4-7b25094720eb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1231.467719] env[62619]: DEBUG nova.compute.manager [req-2bc148f6-34d7-479d-93ca-76687f55e62c req-9c9b0359-3c50-48d8-93ff-28621f955dbf service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Received event network-vif-plugged-8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1231.467954] env[62619]: DEBUG oslo_concurrency.lockutils [req-2bc148f6-34d7-479d-93ca-76687f55e62c req-9c9b0359-3c50-48d8-93ff-28621f955dbf service nova] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1231.468238] env[62619]: DEBUG oslo_concurrency.lockutils [req-2bc148f6-34d7-479d-93ca-76687f55e62c req-9c9b0359-3c50-48d8-93ff-28621f955dbf service nova] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1231.468426] env[62619]: DEBUG oslo_concurrency.lockutils [req-2bc148f6-34d7-479d-93ca-76687f55e62c req-9c9b0359-3c50-48d8-93ff-28621f955dbf service nova] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1231.468603] env[62619]: DEBUG nova.compute.manager [req-2bc148f6-34d7-479d-93ca-76687f55e62c req-9c9b0359-3c50-48d8-93ff-28621f955dbf service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] No waiting events found dispatching network-vif-plugged-8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1231.468780] env[62619]: WARNING nova.compute.manager 
[req-2bc148f6-34d7-479d-93ca-76687f55e62c req-9c9b0359-3c50-48d8-93ff-28621f955dbf service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Received unexpected event network-vif-plugged-8bf05b83-53d1-417d-9952-ffa126ea541d for instance with vm_state building and task_state spawning. [ 1231.550704] env[62619]: DEBUG nova.network.neutron [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Successfully updated port: 8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1232.054048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1232.054048] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1232.054048] env[62619]: DEBUG nova.network.neutron [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Building network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1232.585724] env[62619]: DEBUG nova.network.neutron [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Instance cache missing network info. 
{{(pid=62619) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1232.704992] env[62619]: DEBUG nova.network.neutron [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating instance_info_cache with network_info: [{"id": "8bf05b83-53d1-417d-9952-ffa126ea541d", "address": "fa:16:3e:08:ec:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bf05b83-53", "ovs_interfaceid": "8bf05b83-53d1-417d-9952-ffa126ea541d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1233.207842] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1233.208229] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Instance network_info: |[{"id": "8bf05b83-53d1-417d-9952-ffa126ea541d", "address": "fa:16:3e:08:ec:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bf05b83-53", "ovs_interfaceid": "8bf05b83-53d1-417d-9952-ffa126ea541d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=62619) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1996}} [ 1233.208704] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:08:ec:f7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8614db14-cc04-466b-b309-367ab8296cda', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8bf05b83-53d1-417d-9952-ffa126ea541d', 'vif_model': 'vmxnet3'}] {{(pid=62619) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1233.216153] env[62619]: DEBUG oslo.service.loopingcall [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1233.216378] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Creating VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1337}} [ 1233.216607] env[62619]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-7b86386a-404f-413e-86b8-6a8bec208f16 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.237625] env[62619]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1233.237625] env[62619]: value = "task-1365480" [ 1233.237625] env[62619]: _type = "Task" [ 1233.237625] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.246161] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365480, 'name': CreateVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1233.492878] env[62619]: DEBUG nova.compute.manager [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Received event network-changed-8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1233.493105] env[62619]: DEBUG nova.compute.manager [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Refreshing instance network info cache due to event network-changed-8bf05b83-53d1-417d-9952-ffa126ea541d. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1233.493342] env[62619]: DEBUG oslo_concurrency.lockutils [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] Acquiring lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.493480] env[62619]: DEBUG oslo_concurrency.lockutils [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] Acquired lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.493644] env[62619]: DEBUG nova.network.neutron [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Refreshing network info cache for port 8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1233.747523] env[62619]: DEBUG oslo_vmware.api [-] Task: {'id': task-1365480, 'name': CreateVM_Task, 'duration_secs': 0.282505} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1233.747895] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Created VM on the ESX host {{(pid=62619) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1359}} [ 1233.748324] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1233.748497] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1233.748836] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired external semaphore "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:321}} [ 1233.749101] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f7bfa697-e39b-4107-ac56-b42252851390 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1233.753498] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1233.753498] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dd9ade-129b-cdd1-2279-c94ae4f77a8d" [ 1233.753498] env[62619]: _type = "Task" [ 1233.753498] env[62619]: } to complete. 
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1233.761021] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dd9ade-129b-cdd1-2279-c94ae4f77a8d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.173451] env[62619]: DEBUG nova.network.neutron [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updated VIF entry in instance network info cache for port 8bf05b83-53d1-417d-9952-ffa126ea541d. {{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1234.173807] env[62619]: DEBUG nova.network.neutron [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating instance_info_cache with network_info: [{"id": "8bf05b83-53d1-417d-9952-ffa126ea541d", "address": "fa:16:3e:08:ec:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bf05b83-53", "ovs_interfaceid": "8bf05b83-53d1-417d-9952-ffa126ea541d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1234.263213] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]52dd9ade-129b-cdd1-2279-c94ae4f77a8d, 'name': SearchDatastore_Task, 'duration_secs': 0.009622} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.263521] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.263760] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Processing image a3c7be48-0721-419b-bbd6-8b4cc36c5604 {{(pid=62619) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1234.263993] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1234.264161] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquired lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1234.264344] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Creating directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1234.264596] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8af1132e-b93d-468e-9c03-e321ab1badff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.273409] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Created directory with path [datastore2] devstack-image-cache_base {{(pid=62619) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1234.273614] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Folder [datastore2] devstack-image-cache_base created. 
{{(pid=62619) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1845}} [ 1234.274248] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b0b45dd3-4d79-40c6-a66f-0d311ff5eae6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.278964] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1234.278964] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5220d4d4-0093-376a-d552-f69976259fea" [ 1234.278964] env[62619]: _type = "Task" [ 1234.278964] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.285664] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5220d4d4-0093-376a-d552-f69976259fea, 'name': SearchDatastore_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1234.677336] env[62619]: DEBUG oslo_concurrency.lockutils [req-f3ad2814-5257-49d8-9bdc-f457ca7e73b1 req-af0beb5b-c9df-47cc-a8c2-520da5a3ee30 service nova] Releasing lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1234.789080] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]5220d4d4-0093-376a-d552-f69976259fea, 'name': SearchDatastore_Task, 'duration_secs': 0.020457} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1234.789816] env[62619]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba925775-8f96-449a-98b4-09401435bedc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1234.794523] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1234.794523] env[62619]: value = "session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528db49f-fd8a-7569-9486-f3f7f8cded7a" [ 1234.794523] env[62619]: _type = "Task" [ 1234.794523] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1234.801922] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528db49f-fd8a-7569-9486-f3f7f8cded7a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.304268] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': session[5222d3c1-8fdc-277d-ead2-d6e720f894aa]528db49f-fd8a-7569-9486-f3f7f8cded7a, 'name': SearchDatastore_Task, 'duration_secs': 0.009217} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.304532] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Releasing lock "[datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1235.304787] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copying Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 20e81bd1-f994-4f0f-aa51-423b730fbfb6/20e81bd1-f994-4f0f-aa51-423b730fbfb6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1423}} [ 1235.305055] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-bb5bd5e7-bd22-4aaf-87f9-f5039d0a1bf4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.311668] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1235.311668] env[62619]: value = "task-1365481" [ 1235.311668] env[62619]: _type = "Task" [ 1235.311668] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.318780] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365481, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1235.820955] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365481, 'name': CopyVirtualDisk_Task, 'duration_secs': 0.442231} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1235.821341] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Copied Virtual Disk [datastore2] devstack-image-cache_base/a3c7be48-0721-419b-bbd6-8b4cc36c5604/a3c7be48-0721-419b-bbd6-8b4cc36c5604.vmdk to [datastore2] 20e81bd1-f994-4f0f-aa51-423b730fbfb6/20e81bd1-f994-4f0f-aa51-423b730fbfb6.vmdk {{(pid=62619) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1434}} [ 1235.821478] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Extending root virtual disk to 1048576 {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:167}} [ 1235.821697] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.ExtendVirtualDisk_Task with opID=oslo.vmware-c3e74fae-0b14-44dd-aeb7-aaf6ed812594 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1235.828060] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1235.828060] env[62619]: value = "task-1365482" [ 1235.828060] env[62619]: _type = "Task" [ 1235.828060] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1235.835876] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365482, 'name': ExtendVirtualDisk_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.337655] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365482, 'name': ExtendVirtualDisk_Task, 'duration_secs': 0.064784} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.337949] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Extended root virtual disk {{(pid=62619) _extend_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vmops.py:189}} [ 1236.338823] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c230905d-6f70-40bb-8932-65e20b2d384f {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.361077] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] 20e81bd1-f994-4f0f-aa51-423b730fbfb6/20e81bd1-f994-4f0f-aa51-423b730fbfb6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1236.361348] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f6f0d6a5-dda7-4384-b72e-40c4149fce8e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.381100] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1236.381100] env[62619]: value = "task-1365483" [ 1236.381100] env[62619]: _type = "Task" [ 1236.381100] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.388527] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365483, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1236.891336] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365483, 'name': ReconfigVM_Task, 'duration_secs': 0.270696} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1236.891742] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Reconfigured VM instance instance-0000006c to attach disk [datastore2] 20e81bd1-f994-4f0f-aa51-423b730fbfb6/20e81bd1-f994-4f0f-aa51-423b730fbfb6.vmdk or device None with type sparse {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1236.892204] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-ce70f3bd-c0e0-4a46-bd59-266cc9340980 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1236.898221] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1236.898221] env[62619]: value = "task-1365484" [ 1236.898221] env[62619]: _type = "Task" [ 1236.898221] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1236.905939] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365484, 'name': Rename_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.408569] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365484, 'name': Rename_Task, 'duration_secs': 0.147851} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1237.408842] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Powering on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1452}} [ 1237.409122] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-86927efb-02f1-4fe8-a033-87ada2ddbad7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1237.415244] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1237.415244] env[62619]: value = "task-1365485" [ 1237.415244] env[62619]: _type = "Task" [ 1237.415244] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1237.422674] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365485, 'name': PowerOnVM_Task} progress is 0%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1237.925319] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365485, 'name': PowerOnVM_Task} progress is 66%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1238.426331] env[62619]: DEBUG oslo_vmware.api [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365485, 'name': PowerOnVM_Task, 'duration_secs': 0.839381} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1238.426588] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Powered on the VM {{(pid=62619) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1458}} [ 1238.426798] env[62619]: INFO nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Took 7.12 seconds to spawn the instance on the hypervisor. [ 1238.426983] env[62619]: DEBUG nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Checking state {{(pid=62619) _get_power_state /opt/stack/nova/nova/compute/manager.py:1791}} [ 1238.427752] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e255ad97-9480-4507-876e-68a3b06874fd {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1238.947888] env[62619]: INFO nova.compute.manager [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Took 11.77 seconds to build instance. [ 1239.089817] env[62619]: DEBUG nova.compute.manager [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Received event network-changed-8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1239.090035] env[62619]: DEBUG nova.compute.manager [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Refreshing instance network info cache due to event network-changed-8bf05b83-53d1-417d-9952-ffa126ea541d. 
{{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11136}} [ 1239.090264] env[62619]: DEBUG oslo_concurrency.lockutils [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] Acquiring lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1239.090412] env[62619]: DEBUG oslo_concurrency.lockutils [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] Acquired lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1239.090578] env[62619]: DEBUG nova.network.neutron [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Refreshing network info cache for port 8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1239.449567] env[62619]: DEBUG oslo_concurrency.lockutils [None req-997cf7a3-14ca-4b3a-855b-e667c2142db4 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 13.284s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1239.812293] env[62619]: DEBUG nova.network.neutron [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updated VIF entry in instance network info cache for port 8bf05b83-53d1-417d-9952-ffa126ea541d. 
{{(pid=62619) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1239.812657] env[62619]: DEBUG nova.network.neutron [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating instance_info_cache with network_info: [{"id": "8bf05b83-53d1-417d-9952-ffa126ea541d", "address": "fa:16:3e:08:ec:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bf05b83-53", "ovs_interfaceid": "8bf05b83-53d1-417d-9952-ffa126ea541d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1240.315921] env[62619]: DEBUG oslo_concurrency.lockutils [req-c902a242-b6ff-4832-bf40-ff3df8de2e35 req-14a04a9e-a1eb-494e-94d9-305bb3d20296 service nova] Releasing lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1273.530915] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager.update_available_resource {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1274.034706] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.034972] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1274.035162] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1274.035300] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=62619) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1274.036237] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7cfaf91e-0b41-4da5-a77e-6d16568ea675 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.044348] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45338186-dc4f-4cea-bde2-14f65ddef3d6 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.057833] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed7bd3bb-f24f-4814-afda-3dbac12b67a2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.063804] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-993c18ce-e7be-4c67-aba9-a4c19c8f6e7a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1274.091087] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=181457MB free_disk=149GB free_vcpus=48 pci_devices=None {{(pid=62619) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1274.091224] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1274.091408] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1275.118079] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Instance 20e81bd1-f994-4f0f-aa51-423b730fbfb6 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. 
{{(pid=62619) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1275.118357] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Total usable vcpus: 48, total allocated vcpus: 1 {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1275.118458] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=704MB phys_disk=200GB used_disk=1GB total_vcpus=48 used_vcpus=1 pci_stats=[] {{(pid=62619) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1275.145829] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca648ae9-526a-4e57-ac3e-2c44fa28a7e2 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.153039] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef63cb93-c107-4526-86ca-43fa9d07f108 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.182740] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ed155ae1-6a34-4aac-88bd-74a8606763e0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.189254] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8dc0cc2-61e1-423e-ac2e-bb9b667771fc {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1275.201518] env[62619]: DEBUG nova.compute.provider_tree [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1275.704382] env[62619]: DEBUG nova.scheduler.client.report [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1276.209659] env[62619]: DEBUG nova.compute.resource_tracker [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=62619) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1276.209934] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 2.118s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1277.210727] env[62619]: DEBUG oslo_service.periodic_task [None 
req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.211144] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.211144] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.211272] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=62619) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10547}} [ 1277.327492] env[62619]: DEBUG oslo_concurrency.lockutils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1277.327742] env[62619]: DEBUG oslo_concurrency.lockutils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" acquired by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1277.531115] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1277.531313] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Starting heal instance info cache {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9928}} [ 1277.531433] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Rebuilding the list of instances to heal {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9932}} [ 1277.831321] env[62619]: DEBUG nova.compute.utils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Using /dev/sd instead of None {{(pid=62619) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1278.073798] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquiring lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:310}} [ 1278.073952] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Acquired lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) 
lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:313}} [ 1278.074122] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Forcefully refreshing network info cache for instance {{(pid=62619) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2004}} [ 1278.074279] env[62619]: DEBUG nova.objects.instance [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Lazy-loading 'info_cache' on Instance uuid 20e81bd1-f994-4f0f-aa51-423b730fbfb6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1278.334584] env[62619]: DEBUG oslo_concurrency.lockutils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" "released" by "nova.compute.manager.ComputeManager.reserve_block_device_name..do_reserve" :: held 1.007s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1279.397156] env[62619]: DEBUG oslo_concurrency.lockutils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1279.397523] env[62619]: DEBUG oslo_concurrency.lockutils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" acquired by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1279.397629] env[62619]: INFO nova.compute.manager [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Attaching volume 16852337-9b3e-4134-9843-a76fb0a4e45d to /dev/sdb [ 1279.428644] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e549160-0161-4d18-8577-932e0ec9dceb {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.435743] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e48e668-12fc-4979-8ee4-b7d6778bcd7c {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1279.448989] env[62619]: DEBUG nova.virt.block_device [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating existing volume attachment record: d3e3d049-d535-4dd3-8790-ff9f19c39c53 {{(pid=62619) _volume_attach /opt/stack/nova/nova/virt/block_device.py:666}} [ 1279.827976] env[62619]: DEBUG nova.network.neutron [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating instance_info_cache with network_info: [{"id": "8bf05b83-53d1-417d-9952-ffa126ea541d", 
"address": "fa:16:3e:08:ec:f7", "network": {"id": "9f9479ff-1125-4972-8594-0f4f91c2300f", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1875033696-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": [{"address": "10.180.180.145", "type": "floating", "version": 4, "meta": {}}]}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "60ccdf8f256c427b9767a01dad0616fd", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8614db14-cc04-466b-b309-367ab8296cda", "external-id": "nsx-vlan-transportzone-337", "segmentation_id": 337, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8bf05b83-53", "ovs_interfaceid": "8bf05b83-53d1-417d-9952-ffa126ea541d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1280.330781] env[62619]: DEBUG oslo_concurrency.lockutils [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Releasing lock "refresh_cache-20e81bd1-f994-4f0f-aa51-423b730fbfb6" {{(pid=62619) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:331}} [ 1280.330997] env[62619]: DEBUG nova.compute.manager [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updated the network info_cache for instance {{(pid=62619) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9999}} [ 1280.331234] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.331401] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.530739] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1280.531144] env[62619]: DEBUG oslo_service.periodic_task [None req-054d0ab0-ffd3-4aca-a4cb-89b4dc9d77be None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=62619) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1283.994722] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Volume attach. 
Driver type: vmdk {{(pid=62619) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 1283.994992] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290568', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'name': 'volume-16852337-9b3e-4134-9843-a76fb0a4e45d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '20e81bd1-f994-4f0f-aa51-423b730fbfb6', 'attached_at': '', 'detached_at': '', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'serial': '16852337-9b3e-4134-9843-a76fb0a4e45d'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 1283.995873] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9bad80ec-e5bd-4d62-aa5c-b1611d0f8779 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.011337] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5930c3c-7897-4aae-806c-27d26d2d03a7 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.034134] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Reconfiguring VM instance instance-0000006c to attach disk [datastore2] volume-16852337-9b3e-4134-9843-a76fb0a4e45d/volume-16852337-9b3e-4134-9843-a76fb0a4e45d.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 1284.034372] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-51bee681-4d41-499a-8e04-2e7b7fdca03a {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.050967] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1284.050967] env[62619]: value = "task-1365488" [ 1284.050967] env[62619]: _type = "Task" [ 1284.050967] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.059607] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365488, 'name': ReconfigVM_Task} progress is 6%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1284.560778] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365488, 'name': ReconfigVM_Task, 'duration_secs': 0.320836} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1284.561084] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Reconfigured VM instance instance-0000006c to attach disk [datastore2] volume-16852337-9b3e-4134-9843-a76fb0a4e45d/volume-16852337-9b3e-4134-9843-a76fb0a4e45d.vmdk or device None with type thin {{(pid=62619) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 1284.565527] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-2c0ed31b-e70f-4866-ab76-32b0caaaa6ee {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1284.579171] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1284.579171] env[62619]: value = "task-1365489" [ 1284.579171] env[62619]: _type = "Task" [ 1284.579171] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1284.586308] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365489, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.088744] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365489, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1285.590697] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365489, 'name': ReconfigVM_Task} progress is 99%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1286.090433] env[62619]: DEBUG oslo_vmware.api [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365489, 'name': ReconfigVM_Task, 'duration_secs': 1.13282} completed successfully. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1286.090746] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290568', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'name': 'volume-16852337-9b3e-4134-9843-a76fb0a4e45d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '20e81bd1-f994-4f0f-aa51-423b730fbfb6', 'attached_at': '', 'detached_at': '', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'serial': '16852337-9b3e-4134-9843-a76fb0a4e45d'} {{(pid=62619) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 1287.125435] env[62619]: DEBUG nova.objects.instance [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'flavor' on Instance uuid 20e81bd1-f994-4f0f-aa51-423b730fbfb6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1287.631311] env[62619]: DEBUG oslo_concurrency.lockutils [None req-45bcc602-58a7-48e9-92ab-442e91b93b00 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" "released" by "nova.compute.manager.ComputeManager.attach_volume..do_attach_volume" :: held 8.234s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1287.831083] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1287.831344] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" acquired by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1288.334478] env[62619]: INFO nova.compute.manager [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Detaching volume 16852337-9b3e-4134-9843-a76fb0a4e45d [ 1288.365282] env[62619]: INFO nova.virt.block_device [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Attempting to driver detach volume 16852337-9b3e-4134-9843-a76fb0a4e45d from mountpoint /dev/sdb [ 1288.365643] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 
20e81bd1-f994-4f0f-aa51-423b730fbfb6] Volume detach. Driver type: vmdk {{(pid=62619) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 1288.365907] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290568', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'name': 'volume-16852337-9b3e-4134-9843-a76fb0a4e45d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '20e81bd1-f994-4f0f-aa51-423b730fbfb6', 'attached_at': '', 'detached_at': '', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'serial': '16852337-9b3e-4134-9843-a76fb0a4e45d'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 1288.366796] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9dd77509-e9c4-476b-85f7-4a94bd7f9925 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.388273] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6fbadc45-30df-4f2a-b0ba-972d4922a392 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.395742] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2907215a-6268-4496-afe5-94a862562cf4 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.415326] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb3303a7-d365-452c-a052-339a0bee49ec {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.431278] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] The volume has not been displaced from its original location: [datastore2] volume-16852337-9b3e-4134-9843-a76fb0a4e45d/volume-16852337-9b3e-4134-9843-a76fb0a4e45d.vmdk. No consolidation needed. 
{{(pid=62619) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 1288.436326] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Reconfiguring VM instance instance-0000006c to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 1288.436578] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a6149073-fcc2-4c99-9c37-22e15f84fe67 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.453472] env[62619]: DEBUG oslo_vmware.api [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1288.453472] env[62619]: value = "task-1365490" [ 1288.453472] env[62619]: _type = "Task" [ 1288.453472] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.460503] env[62619]: DEBUG oslo_vmware.api [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365490, 'name': ReconfigVM_Task} progress is 5%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1288.962555] env[62619]: DEBUG oslo_vmware.api [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365490, 'name': ReconfigVM_Task, 'duration_secs': 0.204963} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1288.969931] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Reconfigured VM instance instance-0000006c to detach disk 2001 {{(pid=62619) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 1288.969931] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f271511c-487b-405d-b45f-feaf9ebd7c07 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1288.982204] env[62619]: DEBUG oslo_vmware.api [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1288.982204] env[62619]: value = "task-1365491" [ 1288.982204] env[62619]: _type = "Task" [ 1288.982204] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1288.989389] env[62619]: DEBUG oslo_vmware.api [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365491, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1289.491916] env[62619]: DEBUG oslo_vmware.api [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365491, 'name': ReconfigVM_Task, 'duration_secs': 0.126655} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1289.491916] env[62619]: DEBUG nova.virt.vmwareapi.volumeops [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-290568', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'name': 'volume-16852337-9b3e-4134-9843-a76fb0a4e45d', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': '20e81bd1-f994-4f0f-aa51-423b730fbfb6', 'attached_at': '', 'detached_at': '', 'volume_id': '16852337-9b3e-4134-9843-a76fb0a4e45d', 'serial': '16852337-9b3e-4134-9843-a76fb0a4e45d'} {{(pid=62619) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 1290.031102] env[62619]: DEBUG nova.objects.instance [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'flavor' on Instance uuid 20e81bd1-f994-4f0f-aa51-423b730fbfb6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1291.038079] env[62619]: DEBUG oslo_concurrency.lockutils [None req-8ccba8fd-b053-4336-8ba1-d782c8e287d1 tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" "released" by "nova.compute.manager.ComputeManager.detach_volume..do_detach_volume" :: held 3.206s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.066070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1292.066070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.066070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 
[ 1292.066070] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1292.066572] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1292.070113] env[62619]: INFO nova.compute.manager [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Terminating instance [ 1292.071836] env[62619]: DEBUG nova.compute.manager [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Start destroying the instance on the hypervisor. {{(pid=62619) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3150}} [ 1292.072044] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Destroying instance {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1141}} [ 1292.072859] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afe2fe40-db3d-4f27-b080-89f510c500a8 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.080431] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Powering off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1512}} [ 1292.080658] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-11279404-9771-4736-84c1-55fed8a2856e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.086792] env[62619]: DEBUG oslo_vmware.api [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1292.086792] env[62619]: value = "task-1365492" [ 1292.086792] env[62619]: _type = "Task" [ 1292.086792] env[62619]: } to complete.
{{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.094449] env[62619]: DEBUG oslo_vmware.api [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365492, 'name': PowerOffVM_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1292.597137] env[62619]: DEBUG oslo_vmware.api [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365492, 'name': PowerOffVM_Task, 'duration_secs': 0.180646} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1292.597439] env[62619]: DEBUG nova.virt.vmwareapi.vm_util [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Powered off the VM {{(pid=62619) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1517}} [ 1292.597613] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Unregistering the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1093}} [ 1292.597865] env[62619]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-dee2e162-6d18-42e9-957c-a2e7deb8136e {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.654758] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Unregistered the VM {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1096}} [ 1292.655023] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Deleting contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1106}} [ 1292.655181] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleting the datastore file [datastore2] 20e81bd1-f994-4f0f-aa51-423b730fbfb6 {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1292.655458] env[62619]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-9ca953b9-f951-4b21-9d7c-262c91da09c1 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1292.662418] env[62619]: DEBUG oslo_vmware.api [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for the task: (returnval){ [ 1292.662418] env[62619]: value = "task-1365494" [ 1292.662418] 
env[62619]: _type = "Task" [ 1292.662418] env[62619]: } to complete. {{(pid=62619) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1292.669714] env[62619]: DEBUG oslo_vmware.api [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365494, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1293.173147] env[62619]: DEBUG oslo_vmware.api [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Task: {'id': task-1365494, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.144233} completed successfully. {{(pid=62619) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1293.173540] env[62619]: DEBUG nova.virt.vmwareapi.ds_util [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted the datastore file {{(pid=62619) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1293.173589] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Deleted contents of the VM from datastore datastore2 {{(pid=62619) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1116}} [ 1293.173760] env[62619]: DEBUG nova.virt.vmwareapi.vmops [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Instance destroyed {{(pid=62619) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1143}} [ 1293.173940] env[62619]: INFO nova.compute.manager [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Took 1.10 seconds to destroy the instance on the hypervisor. [ 1293.174255] env[62619]: DEBUG oslo.service.loopingcall [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return.
{{(pid=62619) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1293.174487] env[62619]: DEBUG nova.compute.manager [-] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Deallocating network for instance {{(pid=62619) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2289}} [ 1293.174588] env[62619]: DEBUG nova.network.neutron [-] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] deallocate_for_instance() {{(pid=62619) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1293.639622] env[62619]: DEBUG nova.compute.manager [req-9c67b5a7-3a60-40ca-b017-0b9f02f77291 req-fafe670a-21b6-4a97-97e9-876e4a1d410e service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Received event network-vif-deleted-8bf05b83-53d1-417d-9952-ffa126ea541d {{(pid=62619) external_instance_event /opt/stack/nova/nova/compute/manager.py:11131}} [ 1293.639622] env[62619]: INFO nova.compute.manager [req-9c67b5a7-3a60-40ca-b017-0b9f02f77291 req-fafe670a-21b6-4a97-97e9-876e4a1d410e service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Neutron deleted interface 8bf05b83-53d1-417d-9952-ffa126ea541d; detaching it from the instance and deleting it from the info cache [ 1293.639793] env[62619]: DEBUG nova.network.neutron [req-9c67b5a7-3a60-40ca-b017-0b9f02f77291 req-fafe670a-21b6-4a97-97e9-876e4a1d410e service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.114326] env[62619]: DEBUG nova.network.neutron [-] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Updating instance_info_cache with network_info: [] {{(pid=62619) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1294.142180] env[62619]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-fd356b2a-2e0c-4afb-8c80-0fbe5100872d {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.151549] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9362ae18-c7b4-4ddb-bc92-79dee51ba620 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1294.173927] env[62619]: DEBUG nova.compute.manager [req-9c67b5a7-3a60-40ca-b017-0b9f02f77291 req-fafe670a-21b6-4a97-97e9-876e4a1d410e service nova] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Detach interface failed, port_id=8bf05b83-53d1-417d-9952-ffa126ea541d, reason: Instance 20e81bd1-f994-4f0f-aa51-423b730fbfb6 could not be found. {{(pid=62619) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10965}} [ 1294.617640] env[62619]: INFO nova.compute.manager [-] [instance: 20e81bd1-f994-4f0f-aa51-423b730fbfb6] Took 1.44 seconds to deallocate network for instance. 
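
The "Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return" entry is oslo.service's looping-call machinery: the network deallocation is wrapped in a callable that is re-invoked until it succeeds or gives up. A simplified sketch of that shape using FixedIntervalLoopingCall; the helper name, retry count and interval below are illustrative assumptions, not Nova's exact settings:

    from oslo_service import loopingcall

    attempts = {'n': 0}

    def _deallocate_with_retries():
        attempts['n'] += 1
        try:
            deallocate_network_for_instance()   # hypothetical helper
        except Exception:
            if attempts['n'] < 3:
                return                          # try again on the next tick
            raise                               # give up; exception stops the loop
        raise loopingcall.LoopingCallDone()     # success: stop looping

    timer = loopingcall.FixedIntervalLoopingCall(_deallocate_with_retries)
    timer.start(interval=1).wait()              # blocks until done, as logged above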
[ 1295.124401] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:402}} [ 1295.124649] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:407}} [ 1295.124881] env[62619]: DEBUG nova.objects.instance [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lazy-loading 'resources' on Instance uuid 20e81bd1-f994-4f0f-aa51-423b730fbfb6 {{(pid=62619) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1141}} [ 1295.659261] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-769db909-5f9c-49ba-b460-73fc61d04db0 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.667559] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-275462ff-4bb6-4cb6-9c90-5039ccb3e4ff {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.695312] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b361b16a-c03c-4a9c-8725-56c059965489 {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.701936] env[62619]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35480510-5d4b-4262-82e4-2e1c035643db {{(pid=62619) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1295.714172] env[62619]: DEBUG nova.compute.provider_tree [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed in ProviderTree for provider: c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 {{(pid=62619) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1296.216702] env[62619]: DEBUG nova.scheduler.client.report [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Inventory has not changed for provider c1b543f3-8b72-4e01-a5a8-30dc9ed76c83 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 149, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=62619) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:954}} [ 1296.721964] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e 
tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 1.597s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}} [ 1296.742006] env[62619]: INFO nova.scheduler.client.report [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Deleted allocations for instance 20e81bd1-f994-4f0f-aa51-423b730fbfb6 [ 1297.251186] env[62619]: DEBUG oslo_concurrency.lockutils [None req-b674ef65-1a24-4e6b-80b9-fc6ae185833e tempest-AttachVolumeNegativeTest-207490519 tempest-AttachVolumeNegativeTest-207490519-project-member] Lock "20e81bd1-f994-4f0f-aa51-423b730fbfb6" "released" by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: held 5.185s {{(pid=62619) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:421}}
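
For reference, the inventory reported to placement a few entries above determines schedulable capacity per resource class as (total - reserved) * allocation_ratio, so this provider exposes 48 * 4.0 = 192 VCPU, (196590 - 512) * 1.0 = 196078 MB of RAM and 400 GB of disk to the scheduler (max_unit still caps any single allocation, e.g. 16 VCPU). A quick check of that arithmetic:

    # Effective capacity per resource class, as placement computes it:
    # capacity = (total - reserved) * allocation_ratio
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }
    for rc, inv in inventory.items():
        capacity = int((inv['total'] - inv['reserved']) * inv['allocation_ratio'])
        print(rc, capacity)   # VCPU 192, MEMORY_MB 196078, DISK_GB 400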
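
"Deleted allocations for instance 20e81bd1-f994-4f0f-aa51-423b730fbfb6" corresponds to removing the instance's consumer record from the placement service; over the REST API this is a single DELETE /allocations/{consumer_uuid}. A hedged sketch of that call (the endpoint and token are placeholders, and Nova itself goes through its scheduler report client and keystoneauth sessions rather than raw requests):

    import requests

    PLACEMENT = 'http://controller/placement'    # placeholder endpoint
    TOKEN = '...'                                # placeholder keystone token
    uuid = '20e81bd1-f994-4f0f-aa51-423b730fbfb6'

    # Remove every allocation held by this consumer (the deleted instance).
    resp = requests.delete(f'{PLACEMENT}/allocations/{uuid}',
                           headers={'X-Auth-Token': TOKEN})
    assert resp.status_code == 204               # allocations removed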